| column | type | min length | max length |
| --- | --- | --- | --- |
| sha | string | 40 | 40 |
| text | string | 1 | 13.4M |
| id | string | 2 | 117 |
| tags | list | 1 | 7.91k |
| created_at | string | 25 | 25 |
| metadata | string | 2 | 875k |
| last_modified | string | 25 | 25 |
| arxiv | list | 0 | 25 |
| languages | list | 0 | 7.91k |
| tags_str | string | 17 | 159k |
| text_str | string | 1 | 447k |
| text_lists | list | 0 | 352 |
| processed_texts | list | 1 | 353 |
| tokens_length | list | 1 | 353 |
| input_texts | list | 1 | 40 |
de49c8091be82bc96b65cb47c7375b99c2ec7349
# Dataset Card for Evaluation run of Locutusque/LocutusqueXFelladrin-TinyMistral248M-Instruct

<!-- Provide a quick summary of the dataset. -->

Dataset automatically created during the evaluation run of model [Locutusque/LocutusqueXFelladrin-TinyMistral248M-Instruct](https://huggingface.co/Locutusque/LocutusqueXFelladrin-TinyMistral248M-Instruct) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).

The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)); a sketch of loading it is given after the card.

To load the details from a run, you can for instance do the following:

```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_Locutusque__LocutusqueXFelladrin-TinyMistral248M-Instruct",
	"harness_winogrande_5",
	split="train")
```

## Latest results

These are the [latest results from run 2023-12-16T13:05:29.280274](https://huggingface.co/datasets/open-llm-leaderboard/details_Locutusque__LocutusqueXFelladrin-TinyMistral248M-Instruct/blob/main/results_2023-12-16T13-05-29.280274.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks; you can find each of them in the results and in the "latest" split of each eval):

```python
{ "all": { "acc": 0.2598863864332701, "acc_stderr": 0.03085871471372819, "acc_norm": 0.2612223474549382, "acc_norm_stderr": 0.03168256031998437, "mc1": 0.204406364749082, "mc1_stderr": 0.014117174337432621, "mc2": 0.40124313581017795, "mc2_stderr": 0.01490869512458324 }, "harness|arc:challenge|25": { "acc": 0.19965870307167236, "acc_stderr": 0.011681625756888676, "acc_norm": 0.24744027303754265, "acc_norm_stderr": 0.01261035266329267 }, "harness|hellaswag|10": { "acc": 0.2757418840868353, "acc_stderr": 0.004459740315490862, "acc_norm": 0.2779326827325234, "acc_norm_stderr": 0.004470644845242891 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.26, "acc_stderr": 0.04408440022768081, "acc_norm": 0.26, "acc_norm_stderr": 0.04408440022768081 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.1925925925925926, "acc_stderr": 0.03406542058502652, "acc_norm": 0.1925925925925926, "acc_norm_stderr": 0.03406542058502652 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.2236842105263158, "acc_stderr": 0.03391160934343604, "acc_norm": 0.2236842105263158, "acc_norm_stderr": 0.03391160934343604 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.17, "acc_stderr": 0.0377525168068637, "acc_norm": 0.17, "acc_norm_stderr": 0.0377525168068637 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.2792452830188679, "acc_stderr": 0.02761116340239972, "acc_norm": 0.2792452830188679, "acc_norm_stderr": 0.02761116340239972 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.2569444444444444, "acc_stderr": 0.03653946969442099, "acc_norm": 0.2569444444444444, "acc_norm_stderr": 0.03653946969442099 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-college_computer_science|5": { "acc":
0.21, "acc_stderr": 0.040936018074033256, "acc_norm": 0.21, "acc_norm_stderr": 0.040936018074033256 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.21, "acc_stderr": 0.040936018074033256, "acc_norm": 0.21, "acc_norm_stderr": 0.040936018074033256 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.26011560693641617, "acc_stderr": 0.033450369167889904, "acc_norm": 0.26011560693641617, "acc_norm_stderr": 0.033450369167889904 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.20588235294117646, "acc_stderr": 0.04023382273617749, "acc_norm": 0.20588235294117646, "acc_norm_stderr": 0.04023382273617749 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.18, "acc_stderr": 0.03861229196653695, "acc_norm": 0.18, "acc_norm_stderr": 0.03861229196653695 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.3148936170212766, "acc_stderr": 0.030363582197238167, "acc_norm": 0.3148936170212766, "acc_norm_stderr": 0.030363582197238167 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.32456140350877194, "acc_stderr": 0.04404556157374768, "acc_norm": 0.32456140350877194, "acc_norm_stderr": 0.04404556157374768 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.2413793103448276, "acc_stderr": 0.03565998174135303, "acc_norm": 0.2413793103448276, "acc_norm_stderr": 0.03565998174135303 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.2724867724867725, "acc_stderr": 0.022930973071633345, "acc_norm": 0.2724867724867725, "acc_norm_stderr": 0.022930973071633345 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.23809523809523808, "acc_stderr": 0.03809523809523812, "acc_norm": 0.23809523809523808, "acc_norm_stderr": 0.03809523809523812 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.29, "acc_stderr": 0.045604802157206845, "acc_norm": 0.29, "acc_norm_stderr": 0.045604802157206845 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.24193548387096775, "acc_stderr": 0.02436259969303109, "acc_norm": 0.24193548387096775, "acc_norm_stderr": 0.02436259969303109 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.2857142857142857, "acc_stderr": 0.031785297106427496, "acc_norm": 0.2857142857142857, "acc_norm_stderr": 0.031785297106427496 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.23, "acc_stderr": 0.04229525846816505, "acc_norm": 0.23, "acc_norm_stderr": 0.04229525846816505 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.21818181818181817, "acc_stderr": 0.032250781083062896, "acc_norm": 0.21818181818181817, "acc_norm_stderr": 0.032250781083062896 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.37373737373737376, "acc_stderr": 0.034468977386593325, "acc_norm": 0.37373737373737376, "acc_norm_stderr": 0.034468977386593325 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.3626943005181347, "acc_stderr": 0.034697137917043715, "acc_norm": 0.3626943005181347, "acc_norm_stderr": 0.034697137917043715 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.33589743589743587, "acc_stderr": 0.023946724741563976, "acc_norm": 0.33589743589743587, "acc_norm_stderr": 0.023946724741563976 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.25925925925925924, "acc_stderr": 0.02671924078371218, "acc_norm": 0.25925925925925924, "acc_norm_stderr": 0.02671924078371218 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.3277310924369748, "acc_stderr": 0.030489911417673227, "acc_norm": 0.3277310924369748, "acc_norm_stderr": 
0.030489911417673227 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.19205298013245034, "acc_stderr": 0.03216298420593613, "acc_norm": 0.19205298013245034, "acc_norm_stderr": 0.03216298420593613 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.30642201834862387, "acc_stderr": 0.019765517220458523, "acc_norm": 0.30642201834862387, "acc_norm_stderr": 0.019765517220458523 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.3101851851851852, "acc_stderr": 0.031546962856566295, "acc_norm": 0.3101851851851852, "acc_norm_stderr": 0.031546962856566295 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.29411764705882354, "acc_stderr": 0.03198001660115071, "acc_norm": 0.29411764705882354, "acc_norm_stderr": 0.03198001660115071 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.2109704641350211, "acc_stderr": 0.026558372502661923, "acc_norm": 0.2109704641350211, "acc_norm_stderr": 0.026558372502661923 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.22869955156950672, "acc_stderr": 0.028188240046929196, "acc_norm": 0.22869955156950672, "acc_norm_stderr": 0.028188240046929196 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.2824427480916031, "acc_stderr": 0.03948406125768361, "acc_norm": 0.2824427480916031, "acc_norm_stderr": 0.03948406125768361 }, "harness|hendrycksTest-international_law|5": { "acc": 0.11570247933884298, "acc_stderr": 0.0291998024556228, "acc_norm": 0.11570247933884298, "acc_norm_stderr": 0.0291998024556228 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.2777777777777778, "acc_stderr": 0.043300437496507416, "acc_norm": 0.2777777777777778, "acc_norm_stderr": 0.043300437496507416 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.22699386503067484, "acc_stderr": 0.03291099578615769, "acc_norm": 0.22699386503067484, "acc_norm_stderr": 0.03291099578615769 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.29464285714285715, "acc_stderr": 0.04327040932578729, "acc_norm": 0.29464285714285715, "acc_norm_stderr": 0.04327040932578729 }, "harness|hendrycksTest-management|5": { "acc": 0.44660194174757284, "acc_stderr": 0.04922424153458933, "acc_norm": 0.44660194174757284, "acc_norm_stderr": 0.04922424153458933 }, "harness|hendrycksTest-marketing|5": { "acc": 0.2094017094017094, "acc_stderr": 0.026655699653922737, "acc_norm": 0.2094017094017094, "acc_norm_stderr": 0.026655699653922737 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.35, "acc_stderr": 0.047937248544110196, "acc_norm": 0.35, "acc_norm_stderr": 0.047937248544110196 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.21966794380587484, "acc_stderr": 0.014805384478371176, "acc_norm": 0.21966794380587484, "acc_norm_stderr": 0.014805384478371176 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.2138728323699422, "acc_stderr": 0.022075709251757183, "acc_norm": 0.2138728323699422, "acc_norm_stderr": 0.022075709251757183 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.2636871508379888, "acc_stderr": 0.01473692638376196, "acc_norm": 0.2636871508379888, "acc_norm_stderr": 0.01473692638376196 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.24836601307189543, "acc_stderr": 0.024739981355113596, "acc_norm": 0.24836601307189543, "acc_norm_stderr": 0.024739981355113596 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.2861736334405145, "acc_stderr": 0.025670259242188936, "acc_norm": 0.2861736334405145, "acc_norm_stderr": 0.025670259242188936 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.24074074074074073, 
"acc_stderr": 0.023788583551658533, "acc_norm": 0.24074074074074073, "acc_norm_stderr": 0.023788583551658533 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.24113475177304963, "acc_stderr": 0.02551873104953777, "acc_norm": 0.24113475177304963, "acc_norm_stderr": 0.02551873104953777 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.2438070404172099, "acc_stderr": 0.010966507972178477, "acc_norm": 0.2438070404172099, "acc_norm_stderr": 0.010966507972178477 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.3713235294117647, "acc_stderr": 0.02934980313976587, "acc_norm": 0.3713235294117647, "acc_norm_stderr": 0.02934980313976587 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.25, "acc_stderr": 0.01751781884501444, "acc_norm": 0.25, "acc_norm_stderr": 0.01751781884501444 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.2636363636363636, "acc_stderr": 0.04220224692971987, "acc_norm": 0.2636363636363636, "acc_norm_stderr": 0.04220224692971987 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.2857142857142857, "acc_stderr": 0.028920583220675578, "acc_norm": 0.2857142857142857, "acc_norm_stderr": 0.028920583220675578 }, "harness|hendrycksTest-sociology|5": { "acc": 0.2736318407960199, "acc_stderr": 0.03152439186555401, "acc_norm": 0.2736318407960199, "acc_norm_stderr": 0.03152439186555401 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.23, "acc_stderr": 0.04229525846816505, "acc_norm": 0.23, "acc_norm_stderr": 0.04229525846816505 }, "harness|hendrycksTest-virology|5": { "acc": 0.24096385542168675, "acc_stderr": 0.03329394119073529, "acc_norm": 0.24096385542168675, "acc_norm_stderr": 0.03329394119073529 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.19298245614035087, "acc_stderr": 0.030267457554898465, "acc_norm": 0.19298245614035087, "acc_norm_stderr": 0.030267457554898465 }, "harness|truthfulqa:mc|0": { "mc1": 0.204406364749082, "mc1_stderr": 0.014117174337432621, "mc2": 0.40124313581017795, "mc2_stderr": 0.01490869512458324 }, "harness|winogrande|5": { "acc": 0.4909234411996843, "acc_stderr": 0.014050170094497704 }, "harness|gsm8k|5": { "acc": 0.0, "acc_stderr": 0.0 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. 
news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
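As mentioned in the card, the aggregated metrics of the run live in the additional `results` configuration. A minimal sketch of loading them, assuming the `latest` split alias that the configuration list in the dataset metadata declares for the `results` config:

```python
from datasets import load_dataset

# Aggregated results of the run (sketch: the "results" config and its "latest"
# split alias are taken from the configuration list in the dataset metadata).
results = load_dataset(
    "open-llm-leaderboard/details_Locutusque__LocutusqueXFelladrin-TinyMistral248M-Instruct",
    "results",
    split="latest",
)
print(results)  # inspect the available columns before relying on any of them
```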
open-llm-leaderboard/details_Locutusque__LocutusqueXFelladrin-TinyMistral248M-Instruct
[ "region:us" ]
2023-12-16T13:08:21+00:00
{"pretty_name": "Evaluation run of Locutusque/LocutusqueXFelladrin-TinyMistral248M-Instruct", "dataset_summary": "Dataset automatically created during the evaluation run of model [Locutusque/LocutusqueXFelladrin-TinyMistral248M-Instruct](https://huggingface.co/Locutusque/LocutusqueXFelladrin-TinyMistral248M-Instruct) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_Locutusque__LocutusqueXFelladrin-TinyMistral248M-Instruct\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-12-16T13:05:29.280274](https://huggingface.co/datasets/open-llm-leaderboard/details_Locutusque__LocutusqueXFelladrin-TinyMistral248M-Instruct/blob/main/results_2023-12-16T13-05-29.280274.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.2598863864332701,\n \"acc_stderr\": 0.03085871471372819,\n \"acc_norm\": 0.2612223474549382,\n \"acc_norm_stderr\": 0.03168256031998437,\n \"mc1\": 0.204406364749082,\n \"mc1_stderr\": 0.014117174337432621,\n \"mc2\": 0.40124313581017795,\n \"mc2_stderr\": 0.01490869512458324\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.19965870307167236,\n \"acc_stderr\": 0.011681625756888676,\n \"acc_norm\": 0.24744027303754265,\n \"acc_norm_stderr\": 0.01261035266329267\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.2757418840868353,\n \"acc_stderr\": 0.004459740315490862,\n \"acc_norm\": 0.2779326827325234,\n \"acc_norm_stderr\": 0.004470644845242891\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.26,\n \"acc_stderr\": 0.04408440022768081,\n \"acc_norm\": 0.26,\n \"acc_norm_stderr\": 0.04408440022768081\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.1925925925925926,\n \"acc_stderr\": 0.03406542058502652,\n \"acc_norm\": 0.1925925925925926,\n \"acc_norm_stderr\": 0.03406542058502652\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.2236842105263158,\n \"acc_stderr\": 0.03391160934343604,\n \"acc_norm\": 0.2236842105263158,\n \"acc_norm_stderr\": 0.03391160934343604\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.17,\n \"acc_stderr\": 0.0377525168068637,\n \"acc_norm\": 0.17,\n \"acc_norm_stderr\": 0.0377525168068637\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.2792452830188679,\n \"acc_stderr\": 0.02761116340239972,\n \"acc_norm\": 0.2792452830188679,\n \"acc_norm_stderr\": 0.02761116340239972\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.2569444444444444,\n \"acc_stderr\": 0.03653946969442099,\n \"acc_norm\": 0.2569444444444444,\n \"acc_norm_stderr\": 
0.03653946969442099\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.31,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.31,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.21,\n \"acc_stderr\": 0.040936018074033256,\n \"acc_norm\": 0.21,\n \"acc_norm_stderr\": 0.040936018074033256\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.21,\n \"acc_stderr\": 0.040936018074033256,\n \"acc_norm\": 0.21,\n \"acc_norm_stderr\": 0.040936018074033256\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.26011560693641617,\n \"acc_stderr\": 0.033450369167889904,\n \"acc_norm\": 0.26011560693641617,\n \"acc_norm_stderr\": 0.033450369167889904\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.20588235294117646,\n \"acc_stderr\": 0.04023382273617749,\n \"acc_norm\": 0.20588235294117646,\n \"acc_norm_stderr\": 0.04023382273617749\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.18,\n \"acc_stderr\": 0.03861229196653695,\n \"acc_norm\": 0.18,\n \"acc_norm_stderr\": 0.03861229196653695\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.3148936170212766,\n \"acc_stderr\": 0.030363582197238167,\n \"acc_norm\": 0.3148936170212766,\n \"acc_norm_stderr\": 0.030363582197238167\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.32456140350877194,\n \"acc_stderr\": 0.04404556157374768,\n \"acc_norm\": 0.32456140350877194,\n \"acc_norm_stderr\": 0.04404556157374768\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.2413793103448276,\n \"acc_stderr\": 0.03565998174135303,\n \"acc_norm\": 0.2413793103448276,\n \"acc_norm_stderr\": 0.03565998174135303\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.2724867724867725,\n \"acc_stderr\": 0.022930973071633345,\n \"acc_norm\": 0.2724867724867725,\n \"acc_norm_stderr\": 0.022930973071633345\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.23809523809523808,\n \"acc_stderr\": 0.03809523809523812,\n \"acc_norm\": 0.23809523809523808,\n \"acc_norm_stderr\": 0.03809523809523812\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.29,\n \"acc_stderr\": 0.045604802157206845,\n \"acc_norm\": 0.29,\n \"acc_norm_stderr\": 0.045604802157206845\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.24193548387096775,\n \"acc_stderr\": 0.02436259969303109,\n \"acc_norm\": 0.24193548387096775,\n \"acc_norm_stderr\": 0.02436259969303109\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.2857142857142857,\n \"acc_stderr\": 0.031785297106427496,\n \"acc_norm\": 0.2857142857142857,\n \"acc_norm_stderr\": 0.031785297106427496\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.23,\n \"acc_stderr\": 0.04229525846816505,\n \"acc_norm\": 0.23,\n \"acc_norm_stderr\": 0.04229525846816505\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.21818181818181817,\n \"acc_stderr\": 0.032250781083062896,\n \"acc_norm\": 0.21818181818181817,\n \"acc_norm_stderr\": 0.032250781083062896\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.37373737373737376,\n \"acc_stderr\": 0.034468977386593325,\n \"acc_norm\": 0.37373737373737376,\n \"acc_norm_stderr\": 0.034468977386593325\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.3626943005181347,\n \"acc_stderr\": 0.034697137917043715,\n 
\"acc_norm\": 0.3626943005181347,\n \"acc_norm_stderr\": 0.034697137917043715\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.33589743589743587,\n \"acc_stderr\": 0.023946724741563976,\n \"acc_norm\": 0.33589743589743587,\n \"acc_norm_stderr\": 0.023946724741563976\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.25925925925925924,\n \"acc_stderr\": 0.02671924078371218,\n \"acc_norm\": 0.25925925925925924,\n \"acc_norm_stderr\": 0.02671924078371218\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.3277310924369748,\n \"acc_stderr\": 0.030489911417673227,\n \"acc_norm\": 0.3277310924369748,\n \"acc_norm_stderr\": 0.030489911417673227\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.19205298013245034,\n \"acc_stderr\": 0.03216298420593613,\n \"acc_norm\": 0.19205298013245034,\n \"acc_norm_stderr\": 0.03216298420593613\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.30642201834862387,\n \"acc_stderr\": 0.019765517220458523,\n \"acc_norm\": 0.30642201834862387,\n \"acc_norm_stderr\": 0.019765517220458523\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.3101851851851852,\n \"acc_stderr\": 0.031546962856566295,\n \"acc_norm\": 0.3101851851851852,\n \"acc_norm_stderr\": 0.031546962856566295\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.29411764705882354,\n \"acc_stderr\": 0.03198001660115071,\n \"acc_norm\": 0.29411764705882354,\n \"acc_norm_stderr\": 0.03198001660115071\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.2109704641350211,\n \"acc_stderr\": 0.026558372502661923,\n \"acc_norm\": 0.2109704641350211,\n \"acc_norm_stderr\": 0.026558372502661923\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.22869955156950672,\n \"acc_stderr\": 0.028188240046929196,\n \"acc_norm\": 0.22869955156950672,\n \"acc_norm_stderr\": 0.028188240046929196\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.2824427480916031,\n \"acc_stderr\": 0.03948406125768361,\n \"acc_norm\": 0.2824427480916031,\n \"acc_norm_stderr\": 0.03948406125768361\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.11570247933884298,\n \"acc_stderr\": 0.0291998024556228,\n \"acc_norm\": 0.11570247933884298,\n \"acc_norm_stderr\": 0.0291998024556228\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.2777777777777778,\n \"acc_stderr\": 0.043300437496507416,\n \"acc_norm\": 0.2777777777777778,\n \"acc_norm_stderr\": 0.043300437496507416\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.22699386503067484,\n \"acc_stderr\": 0.03291099578615769,\n \"acc_norm\": 0.22699386503067484,\n \"acc_norm_stderr\": 0.03291099578615769\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.29464285714285715,\n \"acc_stderr\": 0.04327040932578729,\n \"acc_norm\": 0.29464285714285715,\n \"acc_norm_stderr\": 0.04327040932578729\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.44660194174757284,\n \"acc_stderr\": 0.04922424153458933,\n \"acc_norm\": 0.44660194174757284,\n \"acc_norm_stderr\": 0.04922424153458933\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.2094017094017094,\n \"acc_stderr\": 0.026655699653922737,\n \"acc_norm\": 0.2094017094017094,\n \"acc_norm_stderr\": 0.026655699653922737\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.35,\n \"acc_stderr\": 0.047937248544110196,\n \"acc_norm\": 0.35,\n 
\"acc_norm_stderr\": 0.047937248544110196\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.21966794380587484,\n \"acc_stderr\": 0.014805384478371176,\n \"acc_norm\": 0.21966794380587484,\n \"acc_norm_stderr\": 0.014805384478371176\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.2138728323699422,\n \"acc_stderr\": 0.022075709251757183,\n \"acc_norm\": 0.2138728323699422,\n \"acc_norm_stderr\": 0.022075709251757183\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.2636871508379888,\n \"acc_stderr\": 0.01473692638376196,\n \"acc_norm\": 0.2636871508379888,\n \"acc_norm_stderr\": 0.01473692638376196\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.24836601307189543,\n \"acc_stderr\": 0.024739981355113596,\n \"acc_norm\": 0.24836601307189543,\n \"acc_norm_stderr\": 0.024739981355113596\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.2861736334405145,\n \"acc_stderr\": 0.025670259242188936,\n \"acc_norm\": 0.2861736334405145,\n \"acc_norm_stderr\": 0.025670259242188936\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.24074074074074073,\n \"acc_stderr\": 0.023788583551658533,\n \"acc_norm\": 0.24074074074074073,\n \"acc_norm_stderr\": 0.023788583551658533\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.24113475177304963,\n \"acc_stderr\": 0.02551873104953777,\n \"acc_norm\": 0.24113475177304963,\n \"acc_norm_stderr\": 0.02551873104953777\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.2438070404172099,\n \"acc_stderr\": 0.010966507972178477,\n \"acc_norm\": 0.2438070404172099,\n \"acc_norm_stderr\": 0.010966507972178477\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.3713235294117647,\n \"acc_stderr\": 0.02934980313976587,\n \"acc_norm\": 0.3713235294117647,\n \"acc_norm_stderr\": 0.02934980313976587\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.25,\n \"acc_stderr\": 0.01751781884501444,\n \"acc_norm\": 0.25,\n \"acc_norm_stderr\": 0.01751781884501444\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.2636363636363636,\n \"acc_stderr\": 0.04220224692971987,\n \"acc_norm\": 0.2636363636363636,\n \"acc_norm_stderr\": 0.04220224692971987\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.2857142857142857,\n \"acc_stderr\": 0.028920583220675578,\n \"acc_norm\": 0.2857142857142857,\n \"acc_norm_stderr\": 0.028920583220675578\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.2736318407960199,\n \"acc_stderr\": 0.03152439186555401,\n \"acc_norm\": 0.2736318407960199,\n \"acc_norm_stderr\": 0.03152439186555401\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.23,\n \"acc_stderr\": 0.04229525846816505,\n \"acc_norm\": 0.23,\n \"acc_norm_stderr\": 0.04229525846816505\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.24096385542168675,\n \"acc_stderr\": 0.03329394119073529,\n \"acc_norm\": 0.24096385542168675,\n \"acc_norm_stderr\": 0.03329394119073529\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.19298245614035087,\n \"acc_stderr\": 0.030267457554898465,\n \"acc_norm\": 0.19298245614035087,\n \"acc_norm_stderr\": 0.030267457554898465\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.204406364749082,\n \"mc1_stderr\": 0.014117174337432621,\n \"mc2\": 0.40124313581017795,\n \"mc2_stderr\": 0.01490869512458324\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.4909234411996843,\n \"acc_stderr\": 0.014050170094497704\n },\n 
\"harness|gsm8k|5\": {\n \"acc\": 0.0,\n \"acc_stderr\": 0.0\n }\n}\n```", "repo_url": "https://huggingface.co/Locutusque/LocutusqueXFelladrin-TinyMistral248M-Instruct", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_12_16T13_05_29.280274", "path": ["**/details_harness|arc:challenge|25_2023-12-16T13-05-29.280274.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-12-16T13-05-29.280274.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_12_16T13_05_29.280274", "path": ["**/details_harness|gsm8k|5_2023-12-16T13-05-29.280274.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-12-16T13-05-29.280274.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_12_16T13_05_29.280274", "path": ["**/details_harness|hellaswag|10_2023-12-16T13-05-29.280274.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-12-16T13-05-29.280274.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_12_16T13_05_29.280274", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T13-05-29.280274.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-12-16T13-05-29.280274.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-12-16T13-05-29.280274.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T13-05-29.280274.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T13-05-29.280274.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-12-16T13-05-29.280274.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T13-05-29.280274.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T13-05-29.280274.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T13-05-29.280274.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T13-05-29.280274.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-12-16T13-05-29.280274.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-12-16T13-05-29.280274.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T13-05-29.280274.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-12-16T13-05-29.280274.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T13-05-29.280274.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T13-05-29.280274.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T13-05-29.280274.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-12-16T13-05-29.280274.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T13-05-29.280274.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T13-05-29.280274.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T13-05-29.280274.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T13-05-29.280274.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T13-05-29.280274.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T13-05-29.280274.parquet", 
"**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T13-05-29.280274.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T13-05-29.280274.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T13-05-29.280274.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T13-05-29.280274.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T13-05-29.280274.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T13-05-29.280274.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T13-05-29.280274.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T13-05-29.280274.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-12-16T13-05-29.280274.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T13-05-29.280274.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-12-16T13-05-29.280274.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T13-05-29.280274.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T13-05-29.280274.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T13-05-29.280274.parquet", "**/details_harness|hendrycksTest-management|5_2023-12-16T13-05-29.280274.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-12-16T13-05-29.280274.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T13-05-29.280274.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T13-05-29.280274.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T13-05-29.280274.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T13-05-29.280274.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-12-16T13-05-29.280274.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-12-16T13-05-29.280274.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-12-16T13-05-29.280274.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T13-05-29.280274.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-12-16T13-05-29.280274.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T13-05-29.280274.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T13-05-29.280274.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-12-16T13-05-29.280274.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-12-16T13-05-29.280274.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-12-16T13-05-29.280274.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T13-05-29.280274.parquet", "**/details_harness|hendrycksTest-virology|5_2023-12-16T13-05-29.280274.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-12-16T13-05-29.280274.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T13-05-29.280274.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-12-16T13-05-29.280274.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-12-16T13-05-29.280274.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T13-05-29.280274.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T13-05-29.280274.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-12-16T13-05-29.280274.parquet", 
"**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T13-05-29.280274.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T13-05-29.280274.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T13-05-29.280274.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T13-05-29.280274.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-12-16T13-05-29.280274.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-12-16T13-05-29.280274.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T13-05-29.280274.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-12-16T13-05-29.280274.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T13-05-29.280274.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T13-05-29.280274.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T13-05-29.280274.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-12-16T13-05-29.280274.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T13-05-29.280274.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T13-05-29.280274.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T13-05-29.280274.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T13-05-29.280274.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T13-05-29.280274.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T13-05-29.280274.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T13-05-29.280274.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T13-05-29.280274.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T13-05-29.280274.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T13-05-29.280274.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T13-05-29.280274.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T13-05-29.280274.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T13-05-29.280274.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T13-05-29.280274.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-12-16T13-05-29.280274.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T13-05-29.280274.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-12-16T13-05-29.280274.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T13-05-29.280274.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T13-05-29.280274.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T13-05-29.280274.parquet", "**/details_harness|hendrycksTest-management|5_2023-12-16T13-05-29.280274.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-12-16T13-05-29.280274.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T13-05-29.280274.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T13-05-29.280274.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T13-05-29.280274.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T13-05-29.280274.parquet", 
"**/details_harness|hendrycksTest-nutrition|5_2023-12-16T13-05-29.280274.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-12-16T13-05-29.280274.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-12-16T13-05-29.280274.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T13-05-29.280274.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-12-16T13-05-29.280274.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T13-05-29.280274.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T13-05-29.280274.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-12-16T13-05-29.280274.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-12-16T13-05-29.280274.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-12-16T13-05-29.280274.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T13-05-29.280274.parquet", "**/details_harness|hendrycksTest-virology|5_2023-12-16T13-05-29.280274.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-12-16T13-05-29.280274.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_12_16T13_05_29.280274", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T13-05-29.280274.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T13-05-29.280274.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_12_16T13_05_29.280274", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-12-16T13-05-29.280274.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-12-16T13-05-29.280274.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_12_16T13_05_29.280274", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-12-16T13-05-29.280274.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-12-16T13-05-29.280274.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_12_16T13_05_29.280274", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T13-05-29.280274.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T13-05-29.280274.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_12_16T13_05_29.280274", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T13-05-29.280274.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T13-05-29.280274.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_12_16T13_05_29.280274", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-12-16T13-05-29.280274.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-12-16T13-05-29.280274.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_12_16T13_05_29.280274", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T13-05-29.280274.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T13-05-29.280274.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", 
"data_files": [{"split": "2023_12_16T13_05_29.280274", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T13-05-29.280274.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T13-05-29.280274.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_12_16T13_05_29.280274", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T13-05-29.280274.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T13-05-29.280274.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_12_16T13_05_29.280274", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T13-05-29.280274.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T13-05-29.280274.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_12_16T13_05_29.280274", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-12-16T13-05-29.280274.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-12-16T13-05-29.280274.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_12_16T13_05_29.280274", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-12-16T13-05-29.280274.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-12-16T13-05-29.280274.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_12_16T13_05_29.280274", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T13-05-29.280274.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T13-05-29.280274.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_12_16T13_05_29.280274", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-12-16T13-05-29.280274.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-12-16T13-05-29.280274.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_12_16T13_05_29.280274", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T13-05-29.280274.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T13-05-29.280274.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_12_16T13_05_29.280274", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T13-05-29.280274.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T13-05-29.280274.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_12_16T13_05_29.280274", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T13-05-29.280274.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T13-05-29.280274.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_12_16T13_05_29.280274", "path": 
["**/details_harness|hendrycksTest-global_facts|5_2023-12-16T13-05-29.280274.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-12-16T13-05-29.280274.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_12_16T13_05_29.280274", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T13-05-29.280274.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T13-05-29.280274.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_12_16T13_05_29.280274", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T13-05-29.280274.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T13-05-29.280274.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_12_16T13_05_29.280274", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T13-05-29.280274.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T13-05-29.280274.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_12_16T13_05_29.280274", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T13-05-29.280274.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T13-05-29.280274.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_12_16T13_05_29.280274", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T13-05-29.280274.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T13-05-29.280274.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_12_16T13_05_29.280274", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T13-05-29.280274.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T13-05-29.280274.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_12_16T13_05_29.280274", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T13-05-29.280274.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T13-05-29.280274.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_12_16T13_05_29.280274", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T13-05-29.280274.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T13-05-29.280274.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_12_16T13_05_29.280274", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T13-05-29.280274.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T13-05-29.280274.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_12_16T13_05_29.280274", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T13-05-29.280274.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T13-05-29.280274.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_12_16T13_05_29.280274", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T13-05-29.280274.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T13-05-29.280274.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_12_16T13_05_29.280274", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T13-05-29.280274.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T13-05-29.280274.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_12_16T13_05_29.280274", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T13-05-29.280274.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T13-05-29.280274.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_12_16T13_05_29.280274", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T13-05-29.280274.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T13-05-29.280274.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_12_16T13_05_29.280274", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-12-16T13-05-29.280274.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-12-16T13-05-29.280274.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_12_16T13_05_29.280274", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T13-05-29.280274.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T13-05-29.280274.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_12_16T13_05_29.280274", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-12-16T13-05-29.280274.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-12-16T13-05-29.280274.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_12_16T13_05_29.280274", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T13-05-29.280274.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T13-05-29.280274.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_12_16T13_05_29.280274", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T13-05-29.280274.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T13-05-29.280274.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": 
"2023_12_16T13_05_29.280274", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T13-05-29.280274.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T13-05-29.280274.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_12_16T13_05_29.280274", "path": ["**/details_harness|hendrycksTest-management|5_2023-12-16T13-05-29.280274.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-12-16T13-05-29.280274.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_12_16T13_05_29.280274", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-12-16T13-05-29.280274.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-12-16T13-05-29.280274.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_12_16T13_05_29.280274", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T13-05-29.280274.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T13-05-29.280274.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_12_16T13_05_29.280274", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T13-05-29.280274.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T13-05-29.280274.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_12_16T13_05_29.280274", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T13-05-29.280274.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T13-05-29.280274.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_12_16T13_05_29.280274", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T13-05-29.280274.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T13-05-29.280274.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_12_16T13_05_29.280274", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-12-16T13-05-29.280274.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-12-16T13-05-29.280274.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_12_16T13_05_29.280274", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-12-16T13-05-29.280274.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-12-16T13-05-29.280274.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_12_16T13_05_29.280274", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-12-16T13-05-29.280274.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-12-16T13-05-29.280274.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_12_16T13_05_29.280274", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T13-05-29.280274.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T13-05-29.280274.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_12_16T13_05_29.280274", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-12-16T13-05-29.280274.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-12-16T13-05-29.280274.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_12_16T13_05_29.280274", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T13-05-29.280274.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T13-05-29.280274.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_12_16T13_05_29.280274", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T13-05-29.280274.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T13-05-29.280274.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_12_16T13_05_29.280274", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-12-16T13-05-29.280274.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-12-16T13-05-29.280274.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_12_16T13_05_29.280274", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-12-16T13-05-29.280274.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-12-16T13-05-29.280274.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_12_16T13_05_29.280274", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-12-16T13-05-29.280274.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-12-16T13-05-29.280274.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_12_16T13_05_29.280274", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T13-05-29.280274.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T13-05-29.280274.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_12_16T13_05_29.280274", "path": ["**/details_harness|hendrycksTest-virology|5_2023-12-16T13-05-29.280274.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-12-16T13-05-29.280274.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_12_16T13_05_29.280274", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-12-16T13-05-29.280274.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-12-16T13-05-29.280274.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_12_16T13_05_29.280274", "path": ["**/details_harness|truthfulqa:mc|0_2023-12-16T13-05-29.280274.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-12-16T13-05-29.280274.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_12_16T13_05_29.280274", "path": ["**/details_harness|winogrande|5_2023-12-16T13-05-29.280274.parquet"]}, {"split": "latest", "path": 
["**/details_harness|winogrande|5_2023-12-16T13-05-29.280274.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_12_16T13_05_29.280274", "path": ["results_2023-12-16T13-05-29.280274.parquet"]}, {"split": "latest", "path": ["results_2023-12-16T13-05-29.280274.parquet"]}]}]}
2023-12-16T13:09:03+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of Locutusque/LocutusqueXFelladrin-TinyMistral248M-Instruct Dataset automatically created during the evaluation run of model Locutusque/LocutusqueXFelladrin-TinyMistral248M-Instruct on the Open LLM Leaderboard. The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-12-16T13:05:29.280274(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
[ "# Dataset Card for Evaluation run of Locutusque/LocutusqueXFelladrin-TinyMistral248M-Instruct\n\n\n\nDataset automatically created during the evaluation run of model Locutusque/LocutusqueXFelladrin-TinyMistral248M-Instruct on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-12-16T13:05:29.280274(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of Locutusque/LocutusqueXFelladrin-TinyMistral248M-Instruct\n\n\n\nDataset automatically created during the evaluation run of model Locutusque/LocutusqueXFelladrin-TinyMistral248M-Instruct on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-12-16T13:05:29.280274(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 205, 67, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of Locutusque/LocutusqueXFelladrin-TinyMistral248M-Instruct\n\n\n\nDataset automatically created during the evaluation run of model Locutusque/LocutusqueXFelladrin-TinyMistral248M-Instruct on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-12-16T13:05:29.280274(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]" ]
9fe7f674be1c442073cea7f33d3df9b58816c26f
Responses to the questions in [unnaturalhermes-questions-30k](https://huggingface.co/datasets/ericflo/unnaturalhermes-questions-30k), generated at temperature=0 by the following models: * [mistralai/Mixtral-8x7B-Instruct-v0.1](https://huggingface.co/mistralai/Mixtral-8x7B-Instruct-v0.1) * [teknium/OpenHermes-2p5-Mistral-7B](https://huggingface.co/teknium/OpenHermes-2.5-Mistral-7B) * [mistralai/Mistral-7B-Instruct-v0.2](https://huggingface.co/mistralai/Mistral-7B-Instruct-v0.2) * [togethercomputer/StripedHyena-Nous-7B](https://huggingface.co/togethercomputer/StripedHyena-Nous-7B)
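For illustration only, here is a minimal sketch of how a deterministic (temperature=0) response could be produced with the `transformers` library. The model ID, prompt, and generation length below are placeholders, not the exact pipeline used to build this dataset:

```python
# Sketch: greedy decoding (the usual way to realise "temperature=0") with one
# of the models listed above. Assumes transformers + accelerate are installed.
from transformers import AutoModelForCausalLM, AutoTokenizer

model_id = "teknium/OpenHermes-2.5-Mistral-7B"  # any of the models above
tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(model_id, device_map="auto")

question = "What is the capital of France?"  # stand-in for a row from unnaturalhermes-questions-30k
inputs = tokenizer(question, return_tensors="pt").to(model.device)

# do_sample=False disables sampling, giving deterministic greedy generation.
output_ids = model.generate(**inputs, max_new_tokens=256, do_sample=False)
response = tokenizer.decode(output_ids[0][inputs["input_ids"].shape[1]:], skip_special_tokens=True)
print(response)
```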
ericflo/unnaturalhermes-responses-30k
[ "task_categories:question-answering", "size_categories:100K<n<1M", "language:en", "license:apache-2.0", "region:us" ]
2023-12-16T13:24:07+00:00
{"language": ["en"], "license": "apache-2.0", "size_categories": ["100K<n<1M"], "task_categories": ["question-answering"]}
2023-12-16T22:18:03+00:00
[]
[ "en" ]
TAGS #task_categories-question-answering #size_categories-100K<n<1M #language-English #license-apache-2.0 #region-us
Responses to the questions in unnaturalhermes-questions-30k from the following models at temperature=0: * mistralai/Mixtral-8x7B-Instruct-v0.1 * teknium/OpenHermes-2p5-Mistral-7B * mistralai/Mistral-7B-Instruct-v0.2 * togethercomputer/StripedHyena-Nous-7B
[]
[ "TAGS\n#task_categories-question-answering #size_categories-100K<n<1M #language-English #license-apache-2.0 #region-us \n" ]
[ 42 ]
[ "passage: TAGS\n#task_categories-question-answering #size_categories-100K<n<1M #language-English #license-apache-2.0 #region-us \n" ]
cf6e81fc94812f3b445ee1e1afd795ec2d756985
# Dataset Card for Evaluation run of distilgpt2 <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [distilgpt2](https://huggingface.co/distilgpt2) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_distilgpt2", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-12-16T13:25:51.422801](https://huggingface.co/datasets/open-llm-leaderboard/details_distilgpt2/blob/main/results_2023-12-16T13-25-51.422801.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.24791527761428814, "acc_stderr": 0.030281954866030584, "acc_norm": 0.2481142819487892, "acc_norm_stderr": 0.03108662123022961, "mc1": 0.24969400244798043, "mc1_stderr": 0.015152286907148128, "mc2": 0.444910866699754, "mc2_stderr": 0.015279069341683332 }, "harness|arc:challenge|25": { "acc": 0.18088737201365188, "acc_stderr": 0.011248574467407024, "acc_norm": 0.2226962457337884, "acc_norm_stderr": 0.012158314774829931 }, "harness|hellaswag|10": { "acc": 0.2705636327424816, "acc_stderr": 0.0044334307903494095, "acc_norm": 0.2758414658434575, "acc_norm_stderr": 0.004460238879247437 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.22, "acc_stderr": 0.04163331998932268, "acc_norm": 0.22, "acc_norm_stderr": 0.04163331998932268 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.24444444444444444, "acc_stderr": 0.03712537833614866, "acc_norm": 0.24444444444444444, "acc_norm_stderr": 0.03712537833614866 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.17763157894736842, "acc_stderr": 0.031103182383123398, "acc_norm": 0.17763157894736842, "acc_norm_stderr": 0.031103182383123398 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.21, "acc_stderr": 0.040936018074033256, "acc_norm": 0.21, "acc_norm_stderr": 0.040936018074033256 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.22264150943396227, "acc_stderr": 0.0256042334708991, "acc_norm": 0.22264150943396227, "acc_norm_stderr": 0.0256042334708991 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.2222222222222222, "acc_stderr": 0.03476590104304134, "acc_norm": 0.2222222222222222, "acc_norm_stderr": 0.03476590104304134 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.17, "acc_stderr": 0.03775251680686371, "acc_norm": 0.17, "acc_norm_stderr": 0.03775251680686371 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.18, "acc_stderr": 0.03861229196653694, "acc_norm": 0.18, "acc_norm_stderr": 0.03861229196653694 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.27, "acc_stderr": 0.04461960433384739, "acc_norm": 0.27, 
"acc_norm_stderr": 0.04461960433384739 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.20809248554913296, "acc_stderr": 0.030952890217749895, "acc_norm": 0.20809248554913296, "acc_norm_stderr": 0.030952890217749895 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.20588235294117646, "acc_stderr": 0.04023382273617747, "acc_norm": 0.20588235294117646, "acc_norm_stderr": 0.04023382273617747 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.21, "acc_stderr": 0.040936018074033256, "acc_norm": 0.21, "acc_norm_stderr": 0.040936018074033256 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.28085106382978725, "acc_stderr": 0.029379170464124818, "acc_norm": 0.28085106382978725, "acc_norm_stderr": 0.029379170464124818 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.2631578947368421, "acc_stderr": 0.04142439719489362, "acc_norm": 0.2631578947368421, "acc_norm_stderr": 0.04142439719489362 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.30344827586206896, "acc_stderr": 0.038312260488503336, "acc_norm": 0.30344827586206896, "acc_norm_stderr": 0.038312260488503336 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.2566137566137566, "acc_stderr": 0.022494510767503154, "acc_norm": 0.2566137566137566, "acc_norm_stderr": 0.022494510767503154 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.1984126984126984, "acc_stderr": 0.03567016675276865, "acc_norm": 0.1984126984126984, "acc_norm_stderr": 0.03567016675276865 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.15, "acc_stderr": 0.0358870281282637, "acc_norm": 0.15, "acc_norm_stderr": 0.0358870281282637 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.3193548387096774, "acc_stderr": 0.026522709674667768, "acc_norm": 0.3193548387096774, "acc_norm_stderr": 0.026522709674667768 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.30049261083743845, "acc_stderr": 0.03225799476233483, "acc_norm": 0.30049261083743845, "acc_norm_stderr": 0.03225799476233483 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.2, "acc_stderr": 0.04020151261036846, "acc_norm": 0.2, "acc_norm_stderr": 0.04020151261036846 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.21818181818181817, "acc_stderr": 0.03225078108306289, "acc_norm": 0.21818181818181817, "acc_norm_stderr": 0.03225078108306289 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.2878787878787879, "acc_stderr": 0.03225883512300992, "acc_norm": 0.2878787878787879, "acc_norm_stderr": 0.03225883512300992 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.23316062176165803, "acc_stderr": 0.030516111371476008, "acc_norm": 0.23316062176165803, "acc_norm_stderr": 0.030516111371476008 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.3128205128205128, "acc_stderr": 0.023507579020645347, "acc_norm": 0.3128205128205128, "acc_norm_stderr": 0.023507579020645347 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.26296296296296295, "acc_stderr": 0.026842057873833706, "acc_norm": 0.26296296296296295, "acc_norm_stderr": 0.026842057873833706 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.23949579831932774, "acc_stderr": 0.027722065493361273, "acc_norm": 0.23949579831932774, "acc_norm_stderr": 0.027722065493361273 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.31125827814569534, "acc_stderr": 0.03780445850526733, "acc_norm": 0.31125827814569534, "acc_norm_stderr": 0.03780445850526733 }, 
"harness|hendrycksTest-high_school_psychology|5": { "acc": 0.21467889908256882, "acc_stderr": 0.017604304149256483, "acc_norm": 0.21467889908256882, "acc_norm_stderr": 0.017604304149256483 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.4722222222222222, "acc_stderr": 0.0340470532865388, "acc_norm": 0.4722222222222222, "acc_norm_stderr": 0.0340470532865388 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.31862745098039214, "acc_stderr": 0.032702871814820796, "acc_norm": 0.31862745098039214, "acc_norm_stderr": 0.032702871814820796 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.2616033755274262, "acc_stderr": 0.028609516716994934, "acc_norm": 0.2616033755274262, "acc_norm_stderr": 0.028609516716994934 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.21973094170403587, "acc_stderr": 0.027790177064383602, "acc_norm": 0.21973094170403587, "acc_norm_stderr": 0.027790177064383602 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.2748091603053435, "acc_stderr": 0.03915345408847836, "acc_norm": 0.2748091603053435, "acc_norm_stderr": 0.03915345408847836 }, "harness|hendrycksTest-international_law|5": { "acc": 0.2975206611570248, "acc_stderr": 0.04173349148083499, "acc_norm": 0.2975206611570248, "acc_norm_stderr": 0.04173349148083499 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.24074074074074073, "acc_stderr": 0.041331194402438376, "acc_norm": 0.24074074074074073, "acc_norm_stderr": 0.041331194402438376 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.26380368098159507, "acc_stderr": 0.03462419931615624, "acc_norm": 0.26380368098159507, "acc_norm_stderr": 0.03462419931615624 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.26785714285714285, "acc_stderr": 0.042032772914677614, "acc_norm": 0.26785714285714285, "acc_norm_stderr": 0.042032772914677614 }, "harness|hendrycksTest-management|5": { "acc": 0.17475728155339806, "acc_stderr": 0.037601780060266224, "acc_norm": 0.17475728155339806, "acc_norm_stderr": 0.037601780060266224 }, "harness|hendrycksTest-marketing|5": { "acc": 0.19230769230769232, "acc_stderr": 0.025819233256483706, "acc_norm": 0.19230769230769232, "acc_norm_stderr": 0.025819233256483706 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.25798212005108556, "acc_stderr": 0.01564583018834895, "acc_norm": 0.25798212005108556, "acc_norm_stderr": 0.01564583018834895 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.2023121387283237, "acc_stderr": 0.021628077380196124, "acc_norm": 0.2023121387283237, "acc_norm_stderr": 0.021628077380196124 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.23798882681564246, "acc_stderr": 0.014242630070574915, "acc_norm": 0.23798882681564246, "acc_norm_stderr": 0.014242630070574915 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.27124183006535946, "acc_stderr": 0.025457756696667867, "acc_norm": 0.27124183006535946, "acc_norm_stderr": 0.025457756696667867 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.1864951768488746, "acc_stderr": 0.02212243977248077, "acc_norm": 0.1864951768488746, "acc_norm_stderr": 0.02212243977248077 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.2777777777777778, "acc_stderr": 0.02492200116888633, "acc_norm": 0.2777777777777778, "acc_norm_stderr": 0.02492200116888633 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.18085106382978725, "acc_stderr": 
0.022960894850119137, "acc_norm": 0.18085106382978725, "acc_norm_stderr": 0.022960894850119137 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.24641460234680573, "acc_stderr": 0.011005971399927235, "acc_norm": 0.24641460234680573, "acc_norm_stderr": 0.011005971399927235 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.44485294117647056, "acc_stderr": 0.030187532060329376, "acc_norm": 0.44485294117647056, "acc_norm_stderr": 0.030187532060329376 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.25, "acc_stderr": 0.01751781884501444, "acc_norm": 0.25, "acc_norm_stderr": 0.01751781884501444 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.22727272727272727, "acc_stderr": 0.040139645540727735, "acc_norm": 0.22727272727272727, "acc_norm_stderr": 0.040139645540727735 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.2612244897959184, "acc_stderr": 0.02812342933514278, "acc_norm": 0.2612244897959184, "acc_norm_stderr": 0.02812342933514278 }, "harness|hendrycksTest-sociology|5": { "acc": 0.23880597014925373, "acc_stderr": 0.030147775935409217, "acc_norm": 0.23880597014925373, "acc_norm_stderr": 0.030147775935409217 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.27, "acc_stderr": 0.044619604333847394, "acc_norm": 0.27, "acc_norm_stderr": 0.044619604333847394 }, "harness|hendrycksTest-virology|5": { "acc": 0.19879518072289157, "acc_stderr": 0.03106939026078943, "acc_norm": 0.19879518072289157, "acc_norm_stderr": 0.03106939026078943 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.21052631578947367, "acc_stderr": 0.0312678171466318, "acc_norm": 0.21052631578947367, "acc_norm_stderr": 0.0312678171466318 }, "harness|truthfulqa:mc|0": { "mc1": 0.24969400244798043, "mc1_stderr": 0.015152286907148128, "mc2": 0.444910866699754, "mc2_stderr": 0.015279069341683332 }, "harness|winogrande|5": { "acc": 0.5311760063141279, "acc_stderr": 0.014025142640639518 }, "harness|gsm8k|5": { "acc": 0.0, "acc_stderr": 0.0 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). 
--> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
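As a small usage note (a sketch, not part of the original card): the aggregated metrics described above can also be read directly from the "results" configuration. The column layout of that parquet file is not documented here, so inspect it before relying on specific fields:

```python
# Sketch: load the aggregated results for this run via the "results" config.
# The "latest" split points at the most recent evaluation.
from datasets import load_dataset

results = load_dataset("open-llm-leaderboard/details_distilgpt2", "results", split="latest")
print(results)               # one row per evaluation run
print(results.column_names)  # discover the available fields before use
```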
open-llm-leaderboard/details_distilgpt2
[ "region:us" ]
2023-12-16T13:27:28+00:00
{"pretty_name": "Evaluation run of distilgpt2", "dataset_summary": "Dataset automatically created during the evaluation run of model [distilgpt2](https://huggingface.co/distilgpt2) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_distilgpt2\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-12-16T13:25:51.422801](https://huggingface.co/datasets/open-llm-leaderboard/details_distilgpt2/blob/main/results_2023-12-16T13-25-51.422801.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.24791527761428814,\n \"acc_stderr\": 0.030281954866030584,\n \"acc_norm\": 0.2481142819487892,\n \"acc_norm_stderr\": 0.03108662123022961,\n \"mc1\": 0.24969400244798043,\n \"mc1_stderr\": 0.015152286907148128,\n \"mc2\": 0.444910866699754,\n \"mc2_stderr\": 0.015279069341683332\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.18088737201365188,\n \"acc_stderr\": 0.011248574467407024,\n \"acc_norm\": 0.2226962457337884,\n \"acc_norm_stderr\": 0.012158314774829931\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.2705636327424816,\n \"acc_stderr\": 0.0044334307903494095,\n \"acc_norm\": 0.2758414658434575,\n \"acc_norm_stderr\": 0.004460238879247437\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.22,\n \"acc_stderr\": 0.04163331998932268,\n \"acc_norm\": 0.22,\n \"acc_norm_stderr\": 0.04163331998932268\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.24444444444444444,\n \"acc_stderr\": 0.03712537833614866,\n \"acc_norm\": 0.24444444444444444,\n \"acc_norm_stderr\": 0.03712537833614866\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.17763157894736842,\n \"acc_stderr\": 0.031103182383123398,\n \"acc_norm\": 0.17763157894736842,\n \"acc_norm_stderr\": 0.031103182383123398\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.21,\n \"acc_stderr\": 0.040936018074033256,\n \"acc_norm\": 0.21,\n \"acc_norm_stderr\": 0.040936018074033256\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.22264150943396227,\n \"acc_stderr\": 0.0256042334708991,\n \"acc_norm\": 0.22264150943396227,\n \"acc_norm_stderr\": 0.0256042334708991\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.2222222222222222,\n \"acc_stderr\": 0.03476590104304134,\n \"acc_norm\": 0.2222222222222222,\n \"acc_norm_stderr\": 0.03476590104304134\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.17,\n \"acc_stderr\": 0.03775251680686371,\n \"acc_norm\": 0.17,\n \"acc_norm_stderr\": 0.03775251680686371\n },\n 
\"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.18,\n \"acc_stderr\": 0.03861229196653694,\n \"acc_norm\": 0.18,\n \"acc_norm_stderr\": 0.03861229196653694\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.27,\n \"acc_stderr\": 0.04461960433384739,\n \"acc_norm\": 0.27,\n \"acc_norm_stderr\": 0.04461960433384739\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.20809248554913296,\n \"acc_stderr\": 0.030952890217749895,\n \"acc_norm\": 0.20809248554913296,\n \"acc_norm_stderr\": 0.030952890217749895\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.20588235294117646,\n \"acc_stderr\": 0.04023382273617747,\n \"acc_norm\": 0.20588235294117646,\n \"acc_norm_stderr\": 0.04023382273617747\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.21,\n \"acc_stderr\": 0.040936018074033256,\n \"acc_norm\": 0.21,\n \"acc_norm_stderr\": 0.040936018074033256\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.28085106382978725,\n \"acc_stderr\": 0.029379170464124818,\n \"acc_norm\": 0.28085106382978725,\n \"acc_norm_stderr\": 0.029379170464124818\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.2631578947368421,\n \"acc_stderr\": 0.04142439719489362,\n \"acc_norm\": 0.2631578947368421,\n \"acc_norm_stderr\": 0.04142439719489362\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.30344827586206896,\n \"acc_stderr\": 0.038312260488503336,\n \"acc_norm\": 0.30344827586206896,\n \"acc_norm_stderr\": 0.038312260488503336\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.2566137566137566,\n \"acc_stderr\": 0.022494510767503154,\n \"acc_norm\": 0.2566137566137566,\n \"acc_norm_stderr\": 0.022494510767503154\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.1984126984126984,\n \"acc_stderr\": 0.03567016675276865,\n \"acc_norm\": 0.1984126984126984,\n \"acc_norm_stderr\": 0.03567016675276865\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.15,\n \"acc_stderr\": 0.0358870281282637,\n \"acc_norm\": 0.15,\n \"acc_norm_stderr\": 0.0358870281282637\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.3193548387096774,\n \"acc_stderr\": 0.026522709674667768,\n \"acc_norm\": 0.3193548387096774,\n \"acc_norm_stderr\": 0.026522709674667768\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.30049261083743845,\n \"acc_stderr\": 0.03225799476233483,\n \"acc_norm\": 0.30049261083743845,\n \"acc_norm_stderr\": 0.03225799476233483\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.2,\n \"acc_stderr\": 0.04020151261036846,\n \"acc_norm\": 0.2,\n \"acc_norm_stderr\": 0.04020151261036846\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.21818181818181817,\n \"acc_stderr\": 0.03225078108306289,\n \"acc_norm\": 0.21818181818181817,\n \"acc_norm_stderr\": 0.03225078108306289\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.2878787878787879,\n \"acc_stderr\": 0.03225883512300992,\n \"acc_norm\": 0.2878787878787879,\n \"acc_norm_stderr\": 0.03225883512300992\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.23316062176165803,\n \"acc_stderr\": 0.030516111371476008,\n \"acc_norm\": 0.23316062176165803,\n \"acc_norm_stderr\": 0.030516111371476008\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.3128205128205128,\n \"acc_stderr\": 0.023507579020645347,\n 
\"acc_norm\": 0.3128205128205128,\n \"acc_norm_stderr\": 0.023507579020645347\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.26296296296296295,\n \"acc_stderr\": 0.026842057873833706,\n \"acc_norm\": 0.26296296296296295,\n \"acc_norm_stderr\": 0.026842057873833706\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.23949579831932774,\n \"acc_stderr\": 0.027722065493361273,\n \"acc_norm\": 0.23949579831932774,\n \"acc_norm_stderr\": 0.027722065493361273\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.31125827814569534,\n \"acc_stderr\": 0.03780445850526733,\n \"acc_norm\": 0.31125827814569534,\n \"acc_norm_stderr\": 0.03780445850526733\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.21467889908256882,\n \"acc_stderr\": 0.017604304149256483,\n \"acc_norm\": 0.21467889908256882,\n \"acc_norm_stderr\": 0.017604304149256483\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.4722222222222222,\n \"acc_stderr\": 0.0340470532865388,\n \"acc_norm\": 0.4722222222222222,\n \"acc_norm_stderr\": 0.0340470532865388\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.31862745098039214,\n \"acc_stderr\": 0.032702871814820796,\n \"acc_norm\": 0.31862745098039214,\n \"acc_norm_stderr\": 0.032702871814820796\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.2616033755274262,\n \"acc_stderr\": 0.028609516716994934,\n \"acc_norm\": 0.2616033755274262,\n \"acc_norm_stderr\": 0.028609516716994934\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.21973094170403587,\n \"acc_stderr\": 0.027790177064383602,\n \"acc_norm\": 0.21973094170403587,\n \"acc_norm_stderr\": 0.027790177064383602\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.2748091603053435,\n \"acc_stderr\": 0.03915345408847836,\n \"acc_norm\": 0.2748091603053435,\n \"acc_norm_stderr\": 0.03915345408847836\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.2975206611570248,\n \"acc_stderr\": 0.04173349148083499,\n \"acc_norm\": 0.2975206611570248,\n \"acc_norm_stderr\": 0.04173349148083499\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.24074074074074073,\n \"acc_stderr\": 0.041331194402438376,\n \"acc_norm\": 0.24074074074074073,\n \"acc_norm_stderr\": 0.041331194402438376\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.26380368098159507,\n \"acc_stderr\": 0.03462419931615624,\n \"acc_norm\": 0.26380368098159507,\n \"acc_norm_stderr\": 0.03462419931615624\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.26785714285714285,\n \"acc_stderr\": 0.042032772914677614,\n \"acc_norm\": 0.26785714285714285,\n \"acc_norm_stderr\": 0.042032772914677614\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.17475728155339806,\n \"acc_stderr\": 0.037601780060266224,\n \"acc_norm\": 0.17475728155339806,\n \"acc_norm_stderr\": 0.037601780060266224\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.19230769230769232,\n \"acc_stderr\": 0.025819233256483706,\n \"acc_norm\": 0.19230769230769232,\n \"acc_norm_stderr\": 0.025819233256483706\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.3,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.3,\n \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.25798212005108556,\n \"acc_stderr\": 0.01564583018834895,\n \"acc_norm\": 0.25798212005108556,\n 
\"acc_norm_stderr\": 0.01564583018834895\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.2023121387283237,\n \"acc_stderr\": 0.021628077380196124,\n \"acc_norm\": 0.2023121387283237,\n \"acc_norm_stderr\": 0.021628077380196124\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.23798882681564246,\n \"acc_stderr\": 0.014242630070574915,\n \"acc_norm\": 0.23798882681564246,\n \"acc_norm_stderr\": 0.014242630070574915\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.27124183006535946,\n \"acc_stderr\": 0.025457756696667867,\n \"acc_norm\": 0.27124183006535946,\n \"acc_norm_stderr\": 0.025457756696667867\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.1864951768488746,\n \"acc_stderr\": 0.02212243977248077,\n \"acc_norm\": 0.1864951768488746,\n \"acc_norm_stderr\": 0.02212243977248077\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.2777777777777778,\n \"acc_stderr\": 0.02492200116888633,\n \"acc_norm\": 0.2777777777777778,\n \"acc_norm_stderr\": 0.02492200116888633\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.18085106382978725,\n \"acc_stderr\": 0.022960894850119137,\n \"acc_norm\": 0.18085106382978725,\n \"acc_norm_stderr\": 0.022960894850119137\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.24641460234680573,\n \"acc_stderr\": 0.011005971399927235,\n \"acc_norm\": 0.24641460234680573,\n \"acc_norm_stderr\": 0.011005971399927235\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.44485294117647056,\n \"acc_stderr\": 0.030187532060329376,\n \"acc_norm\": 0.44485294117647056,\n \"acc_norm_stderr\": 0.030187532060329376\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.25,\n \"acc_stderr\": 0.01751781884501444,\n \"acc_norm\": 0.25,\n \"acc_norm_stderr\": 0.01751781884501444\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.22727272727272727,\n \"acc_stderr\": 0.040139645540727735,\n \"acc_norm\": 0.22727272727272727,\n \"acc_norm_stderr\": 0.040139645540727735\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.2612244897959184,\n \"acc_stderr\": 0.02812342933514278,\n \"acc_norm\": 0.2612244897959184,\n \"acc_norm_stderr\": 0.02812342933514278\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.23880597014925373,\n \"acc_stderr\": 0.030147775935409217,\n \"acc_norm\": 0.23880597014925373,\n \"acc_norm_stderr\": 0.030147775935409217\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.27,\n \"acc_stderr\": 0.044619604333847394,\n \"acc_norm\": 0.27,\n \"acc_norm_stderr\": 0.044619604333847394\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.19879518072289157,\n \"acc_stderr\": 0.03106939026078943,\n \"acc_norm\": 0.19879518072289157,\n \"acc_norm_stderr\": 0.03106939026078943\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.21052631578947367,\n \"acc_stderr\": 0.0312678171466318,\n \"acc_norm\": 0.21052631578947367,\n \"acc_norm_stderr\": 0.0312678171466318\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.24969400244798043,\n \"mc1_stderr\": 0.015152286907148128,\n \"mc2\": 0.444910866699754,\n \"mc2_stderr\": 0.015279069341683332\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.5311760063141279,\n \"acc_stderr\": 0.014025142640639518\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.0,\n \"acc_stderr\": 0.0\n }\n}\n```", "repo_url": "https://huggingface.co/distilgpt2", "leaderboard_url": 
"https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_12_16T13_25_51.422801", "path": ["**/details_harness|arc:challenge|25_2023-12-16T13-25-51.422801.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-12-16T13-25-51.422801.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_12_16T13_25_51.422801", "path": ["**/details_harness|gsm8k|5_2023-12-16T13-25-51.422801.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-12-16T13-25-51.422801.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_12_16T13_25_51.422801", "path": ["**/details_harness|hellaswag|10_2023-12-16T13-25-51.422801.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-12-16T13-25-51.422801.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_12_16T13_25_51.422801", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T13-25-51.422801.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-12-16T13-25-51.422801.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-12-16T13-25-51.422801.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T13-25-51.422801.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T13-25-51.422801.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-12-16T13-25-51.422801.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T13-25-51.422801.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T13-25-51.422801.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T13-25-51.422801.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T13-25-51.422801.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-12-16T13-25-51.422801.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-12-16T13-25-51.422801.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T13-25-51.422801.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-12-16T13-25-51.422801.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T13-25-51.422801.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T13-25-51.422801.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T13-25-51.422801.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-12-16T13-25-51.422801.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T13-25-51.422801.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T13-25-51.422801.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T13-25-51.422801.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T13-25-51.422801.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T13-25-51.422801.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T13-25-51.422801.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T13-25-51.422801.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T13-25-51.422801.parquet", 
"**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T13-25-51.422801.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T13-25-51.422801.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T13-25-51.422801.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T13-25-51.422801.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T13-25-51.422801.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T13-25-51.422801.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-12-16T13-25-51.422801.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T13-25-51.422801.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-12-16T13-25-51.422801.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T13-25-51.422801.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T13-25-51.422801.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T13-25-51.422801.parquet", "**/details_harness|hendrycksTest-management|5_2023-12-16T13-25-51.422801.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-12-16T13-25-51.422801.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T13-25-51.422801.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T13-25-51.422801.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T13-25-51.422801.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T13-25-51.422801.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-12-16T13-25-51.422801.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-12-16T13-25-51.422801.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-12-16T13-25-51.422801.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T13-25-51.422801.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-12-16T13-25-51.422801.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T13-25-51.422801.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T13-25-51.422801.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-12-16T13-25-51.422801.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-12-16T13-25-51.422801.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-12-16T13-25-51.422801.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T13-25-51.422801.parquet", "**/details_harness|hendrycksTest-virology|5_2023-12-16T13-25-51.422801.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-12-16T13-25-51.422801.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T13-25-51.422801.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-12-16T13-25-51.422801.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-12-16T13-25-51.422801.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T13-25-51.422801.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T13-25-51.422801.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-12-16T13-25-51.422801.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T13-25-51.422801.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T13-25-51.422801.parquet", 
"**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T13-25-51.422801.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T13-25-51.422801.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-12-16T13-25-51.422801.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-12-16T13-25-51.422801.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T13-25-51.422801.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-12-16T13-25-51.422801.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T13-25-51.422801.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T13-25-51.422801.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T13-25-51.422801.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-12-16T13-25-51.422801.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T13-25-51.422801.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T13-25-51.422801.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T13-25-51.422801.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T13-25-51.422801.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T13-25-51.422801.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T13-25-51.422801.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T13-25-51.422801.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T13-25-51.422801.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T13-25-51.422801.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T13-25-51.422801.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T13-25-51.422801.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T13-25-51.422801.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T13-25-51.422801.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T13-25-51.422801.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-12-16T13-25-51.422801.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T13-25-51.422801.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-12-16T13-25-51.422801.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T13-25-51.422801.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T13-25-51.422801.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T13-25-51.422801.parquet", "**/details_harness|hendrycksTest-management|5_2023-12-16T13-25-51.422801.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-12-16T13-25-51.422801.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T13-25-51.422801.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T13-25-51.422801.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T13-25-51.422801.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T13-25-51.422801.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-12-16T13-25-51.422801.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-12-16T13-25-51.422801.parquet", 
"**/details_harness|hendrycksTest-prehistory|5_2023-12-16T13-25-51.422801.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T13-25-51.422801.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-12-16T13-25-51.422801.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T13-25-51.422801.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T13-25-51.422801.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-12-16T13-25-51.422801.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-12-16T13-25-51.422801.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-12-16T13-25-51.422801.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T13-25-51.422801.parquet", "**/details_harness|hendrycksTest-virology|5_2023-12-16T13-25-51.422801.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-12-16T13-25-51.422801.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_12_16T13_25_51.422801", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T13-25-51.422801.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T13-25-51.422801.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_12_16T13_25_51.422801", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-12-16T13-25-51.422801.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-12-16T13-25-51.422801.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_12_16T13_25_51.422801", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-12-16T13-25-51.422801.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-12-16T13-25-51.422801.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_12_16T13_25_51.422801", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T13-25-51.422801.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T13-25-51.422801.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_12_16T13_25_51.422801", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T13-25-51.422801.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T13-25-51.422801.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_12_16T13_25_51.422801", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-12-16T13-25-51.422801.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-12-16T13-25-51.422801.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_12_16T13_25_51.422801", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T13-25-51.422801.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T13-25-51.422801.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_12_16T13_25_51.422801", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T13-25-51.422801.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T13-25-51.422801.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_12_16T13_25_51.422801", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T13-25-51.422801.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T13-25-51.422801.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_12_16T13_25_51.422801", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T13-25-51.422801.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T13-25-51.422801.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_12_16T13_25_51.422801", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-12-16T13-25-51.422801.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-12-16T13-25-51.422801.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_12_16T13_25_51.422801", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-12-16T13-25-51.422801.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-12-16T13-25-51.422801.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_12_16T13_25_51.422801", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T13-25-51.422801.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T13-25-51.422801.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_12_16T13_25_51.422801", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-12-16T13-25-51.422801.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-12-16T13-25-51.422801.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_12_16T13_25_51.422801", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T13-25-51.422801.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T13-25-51.422801.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_12_16T13_25_51.422801", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T13-25-51.422801.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T13-25-51.422801.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_12_16T13_25_51.422801", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T13-25-51.422801.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T13-25-51.422801.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_12_16T13_25_51.422801", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-12-16T13-25-51.422801.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-12-16T13-25-51.422801.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_12_16T13_25_51.422801", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T13-25-51.422801.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T13-25-51.422801.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_12_16T13_25_51.422801", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T13-25-51.422801.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T13-25-51.422801.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_12_16T13_25_51.422801", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T13-25-51.422801.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T13-25-51.422801.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_12_16T13_25_51.422801", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T13-25-51.422801.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T13-25-51.422801.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_12_16T13_25_51.422801", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T13-25-51.422801.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T13-25-51.422801.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_12_16T13_25_51.422801", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T13-25-51.422801.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T13-25-51.422801.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_12_16T13_25_51.422801", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T13-25-51.422801.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T13-25-51.422801.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_12_16T13_25_51.422801", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T13-25-51.422801.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T13-25-51.422801.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_12_16T13_25_51.422801", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T13-25-51.422801.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T13-25-51.422801.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_12_16T13_25_51.422801", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T13-25-51.422801.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T13-25-51.422801.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_12_16T13_25_51.422801", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T13-25-51.422801.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T13-25-51.422801.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_12_16T13_25_51.422801", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T13-25-51.422801.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T13-25-51.422801.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_12_16T13_25_51.422801", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T13-25-51.422801.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T13-25-51.422801.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_12_16T13_25_51.422801", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T13-25-51.422801.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T13-25-51.422801.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_12_16T13_25_51.422801", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-12-16T13-25-51.422801.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-12-16T13-25-51.422801.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_12_16T13_25_51.422801", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T13-25-51.422801.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T13-25-51.422801.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_12_16T13_25_51.422801", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-12-16T13-25-51.422801.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-12-16T13-25-51.422801.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_12_16T13_25_51.422801", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T13-25-51.422801.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T13-25-51.422801.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_12_16T13_25_51.422801", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T13-25-51.422801.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T13-25-51.422801.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_12_16T13_25_51.422801", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T13-25-51.422801.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T13-25-51.422801.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_12_16T13_25_51.422801", "path": ["**/details_harness|hendrycksTest-management|5_2023-12-16T13-25-51.422801.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-12-16T13-25-51.422801.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_12_16T13_25_51.422801", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-12-16T13-25-51.422801.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-12-16T13-25-51.422801.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_12_16T13_25_51.422801", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T13-25-51.422801.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T13-25-51.422801.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_12_16T13_25_51.422801", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T13-25-51.422801.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T13-25-51.422801.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_12_16T13_25_51.422801", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T13-25-51.422801.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T13-25-51.422801.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_12_16T13_25_51.422801", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T13-25-51.422801.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T13-25-51.422801.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_12_16T13_25_51.422801", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-12-16T13-25-51.422801.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-12-16T13-25-51.422801.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_12_16T13_25_51.422801", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-12-16T13-25-51.422801.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-12-16T13-25-51.422801.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_12_16T13_25_51.422801", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-12-16T13-25-51.422801.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-12-16T13-25-51.422801.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_12_16T13_25_51.422801", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T13-25-51.422801.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T13-25-51.422801.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_12_16T13_25_51.422801", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-12-16T13-25-51.422801.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2023-12-16T13-25-51.422801.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_12_16T13_25_51.422801", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T13-25-51.422801.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T13-25-51.422801.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_12_16T13_25_51.422801", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T13-25-51.422801.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T13-25-51.422801.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_12_16T13_25_51.422801", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-12-16T13-25-51.422801.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-12-16T13-25-51.422801.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_12_16T13_25_51.422801", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-12-16T13-25-51.422801.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-12-16T13-25-51.422801.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_12_16T13_25_51.422801", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-12-16T13-25-51.422801.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-12-16T13-25-51.422801.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_12_16T13_25_51.422801", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T13-25-51.422801.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T13-25-51.422801.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_12_16T13_25_51.422801", "path": ["**/details_harness|hendrycksTest-virology|5_2023-12-16T13-25-51.422801.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-12-16T13-25-51.422801.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_12_16T13_25_51.422801", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-12-16T13-25-51.422801.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-12-16T13-25-51.422801.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_12_16T13_25_51.422801", "path": ["**/details_harness|truthfulqa:mc|0_2023-12-16T13-25-51.422801.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-12-16T13-25-51.422801.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_12_16T13_25_51.422801", "path": ["**/details_harness|winogrande|5_2023-12-16T13-25-51.422801.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-12-16T13-25-51.422801.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_12_16T13_25_51.422801", "path": ["results_2023-12-16T13-25-51.422801.parquet"]}, {"split": "latest", "path": 
["results_2023-12-16T13-25-51.422801.parquet"]}]}]}
2023-12-16T13:28:11+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of distilgpt2 Dataset automatically created during the evaluation run of model distilgpt2 on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-12-16T13:25:51.422801 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
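A minimal loading sketch for the distilgpt2 card above: the `harness_hendrycksTest_*`/`harness_winogrande_5` configurations and the `latest` split appear in this record's own configuration listing, but the repository id `open-llm-leaderboard/details_distilgpt2` is an assumption based on the leaderboard's usual `details_<model>` naming and is not confirmed by the card itself.

```python
from datasets import load_dataset

# Repository id is assumed from the leaderboard's usual naming scheme;
# the config name and the "latest" split come from this record's config listing.
data = load_dataset(
    "open-llm-leaderboard/details_distilgpt2",
    "harness_winogrande_5",
    split="latest",
)
print(data)
```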
[ "# Dataset Card for Evaluation run of distilgpt2\n\n\n\nDataset automatically created during the evaluation run of model distilgpt2 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-12-16T13:25:51.422801(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of distilgpt2\n\n\n\nDataset automatically created during the evaluation run of model distilgpt2 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-12-16T13:25:51.422801(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 171, 67, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of distilgpt2\n\n\n\nDataset automatically created during the evaluation run of model distilgpt2 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-12-16T13:25:51.422801(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]## Dataset Card Authors [optional]## Dataset Card Contact" ]
1e936a23dfa5abe9b2f0e63d5117d75d50c784da
# Dataset Card for Evaluation run of wang7776/Llama-2-7b-chat-hf-20-sparsity <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [wang7776/Llama-2-7b-chat-hf-20-sparsity](https://huggingface.co/wang7776/Llama-2-7b-chat-hf-20-sparsity) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_wang7776__Llama-2-7b-chat-hf-20-sparsity", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-12-16T13:55:29.096442](https://huggingface.co/datasets/open-llm-leaderboard/details_wang7776__Llama-2-7b-chat-hf-20-sparsity/blob/main/results_2023-12-16T13-55-29.096442.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.47402608498324605, "acc_stderr": 0.03438253640771256, "acc_norm": 0.4787691452187974, "acc_norm_stderr": 0.03513920510566655, "mc1": 0.2974296205630355, "mc1_stderr": 0.016002651487361002, "mc2": 0.45875295870930416, "mc2_stderr": 0.01569027399570652 }, "harness|arc:challenge|25": { "acc": 0.5, "acc_stderr": 0.014611390804670088, "acc_norm": 0.5247440273037542, "acc_norm_stderr": 0.01459348769493774 }, "harness|hellaswag|10": { "acc": 0.5867357100179247, "acc_stderr": 0.004914130855431776, "acc_norm": 0.7791276638119896, "acc_norm_stderr": 0.0041398679751162995 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.29, "acc_stderr": 0.04560480215720683, "acc_norm": 0.29, "acc_norm_stderr": 0.04560480215720683 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.4148148148148148, "acc_stderr": 0.042561937679014075, "acc_norm": 0.4148148148148148, "acc_norm_stderr": 0.042561937679014075 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.5, "acc_stderr": 0.04068942293855797, "acc_norm": 0.5, "acc_norm_stderr": 0.04068942293855797 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.48, "acc_stderr": 0.050211673156867795, "acc_norm": 0.48, "acc_norm_stderr": 0.050211673156867795 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.5283018867924528, "acc_stderr": 0.030723535249006107, "acc_norm": 0.5283018867924528, "acc_norm_stderr": 0.030723535249006107 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.5, "acc_stderr": 0.04181210050035455, "acc_norm": 0.5, "acc_norm_stderr": 0.04181210050035455 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.32, "acc_stderr": 0.04688261722621504, "acc_norm": 0.32, "acc_norm_stderr": 0.04688261722621504 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.38, "acc_stderr": 0.048783173121456316, "acc_norm": 0.38, "acc_norm_stderr": 0.048783173121456316 }, "harness|hendrycksTest-college_mathematics|5": { 
"acc": 0.33, "acc_stderr": 0.04725815626252605, "acc_norm": 0.33, "acc_norm_stderr": 0.04725815626252605 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.3872832369942196, "acc_stderr": 0.03714325906302065, "acc_norm": 0.3872832369942196, "acc_norm_stderr": 0.03714325906302065 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.20588235294117646, "acc_stderr": 0.04023382273617747, "acc_norm": 0.20588235294117646, "acc_norm_stderr": 0.04023382273617747 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.55, "acc_stderr": 0.05, "acc_norm": 0.55, "acc_norm_stderr": 0.05 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.3872340425531915, "acc_stderr": 0.03184389265339526, "acc_norm": 0.3872340425531915, "acc_norm_stderr": 0.03184389265339526 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.35964912280701755, "acc_stderr": 0.04514496132873634, "acc_norm": 0.35964912280701755, "acc_norm_stderr": 0.04514496132873634 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.4827586206896552, "acc_stderr": 0.04164188720169377, "acc_norm": 0.4827586206896552, "acc_norm_stderr": 0.04164188720169377 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.2751322751322751, "acc_stderr": 0.023000086859068652, "acc_norm": 0.2751322751322751, "acc_norm_stderr": 0.023000086859068652 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.25396825396825395, "acc_stderr": 0.038932596106046734, "acc_norm": 0.25396825396825395, "acc_norm_stderr": 0.038932596106046734 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.35, "acc_stderr": 0.04793724854411019, "acc_norm": 0.35, "acc_norm_stderr": 0.04793724854411019 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.5258064516129032, "acc_stderr": 0.02840609505765332, "acc_norm": 0.5258064516129032, "acc_norm_stderr": 0.02840609505765332 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.3497536945812808, "acc_stderr": 0.03355400904969566, "acc_norm": 0.3497536945812808, "acc_norm_stderr": 0.03355400904969566 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.4, "acc_stderr": 0.049236596391733084, "acc_norm": 0.4, "acc_norm_stderr": 0.049236596391733084 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.5818181818181818, "acc_stderr": 0.03851716319398395, "acc_norm": 0.5818181818181818, "acc_norm_stderr": 0.03851716319398395 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.5909090909090909, "acc_stderr": 0.03502975799413007, "acc_norm": 0.5909090909090909, "acc_norm_stderr": 0.03502975799413007 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.6787564766839378, "acc_stderr": 0.033699508685490674, "acc_norm": 0.6787564766839378, "acc_norm_stderr": 0.033699508685490674 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.39487179487179486, "acc_stderr": 0.02478431694215637, "acc_norm": 0.39487179487179486, "acc_norm_stderr": 0.02478431694215637 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.26666666666666666, "acc_stderr": 0.026962424325073835, "acc_norm": 0.26666666666666666, "acc_norm_stderr": 0.026962424325073835 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.42016806722689076, "acc_stderr": 0.03206183783236152, "acc_norm": 0.42016806722689076, "acc_norm_stderr": 0.03206183783236152 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.304635761589404, "acc_stderr": 0.03757949922943343, "acc_norm": 0.304635761589404, "acc_norm_stderr": 0.03757949922943343 
}, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.6477064220183486, "acc_stderr": 0.020480568843998986, "acc_norm": 0.6477064220183486, "acc_norm_stderr": 0.020480568843998986 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.30092592592592593, "acc_stderr": 0.03128039084329881, "acc_norm": 0.30092592592592593, "acc_norm_stderr": 0.03128039084329881 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.6519607843137255, "acc_stderr": 0.03343311240488419, "acc_norm": 0.6519607843137255, "acc_norm_stderr": 0.03343311240488419 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.6244725738396625, "acc_stderr": 0.03152256243091156, "acc_norm": 0.6244725738396625, "acc_norm_stderr": 0.03152256243091156 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.5695067264573991, "acc_stderr": 0.0332319730294294, "acc_norm": 0.5695067264573991, "acc_norm_stderr": 0.0332319730294294 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.5648854961832062, "acc_stderr": 0.043482080516448585, "acc_norm": 0.5648854961832062, "acc_norm_stderr": 0.043482080516448585 }, "harness|hendrycksTest-international_law|5": { "acc": 0.6115702479338843, "acc_stderr": 0.04449270350068382, "acc_norm": 0.6115702479338843, "acc_norm_stderr": 0.04449270350068382 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.5555555555555556, "acc_stderr": 0.04803752235190193, "acc_norm": 0.5555555555555556, "acc_norm_stderr": 0.04803752235190193 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.558282208588957, "acc_stderr": 0.03901591825836184, "acc_norm": 0.558282208588957, "acc_norm_stderr": 0.03901591825836184 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.35714285714285715, "acc_stderr": 0.04547960999764376, "acc_norm": 0.35714285714285715, "acc_norm_stderr": 0.04547960999764376 }, "harness|hendrycksTest-management|5": { "acc": 0.6601941747572816, "acc_stderr": 0.046897659372781335, "acc_norm": 0.6601941747572816, "acc_norm_stderr": 0.046897659372781335 }, "harness|hendrycksTest-marketing|5": { "acc": 0.7136752136752137, "acc_stderr": 0.02961432369045665, "acc_norm": 0.7136752136752137, "acc_norm_stderr": 0.02961432369045665 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.49, "acc_stderr": 0.05024183937956911, "acc_norm": 0.49, "acc_norm_stderr": 0.05024183937956911 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.6666666666666666, "acc_stderr": 0.01685739124747255, "acc_norm": 0.6666666666666666, "acc_norm_stderr": 0.01685739124747255 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.49710982658959535, "acc_stderr": 0.02691864538323901, "acc_norm": 0.49710982658959535, "acc_norm_stderr": 0.02691864538323901 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.24804469273743016, "acc_stderr": 0.014444157808261467, "acc_norm": 0.24804469273743016, "acc_norm_stderr": 0.014444157808261467 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.4934640522875817, "acc_stderr": 0.028627470550556054, "acc_norm": 0.4934640522875817, "acc_norm_stderr": 0.028627470550556054 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.5627009646302251, "acc_stderr": 0.028173917761762892, "acc_norm": 0.5627009646302251, "acc_norm_stderr": 0.028173917761762892 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.5432098765432098, "acc_stderr": 0.02771666165019404, "acc_norm": 0.5432098765432098, "acc_norm_stderr": 0.02771666165019404 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.36879432624113473, "acc_stderr": 0.028782227561347243, 
"acc_norm": 0.36879432624113473, "acc_norm_stderr": 0.028782227561347243 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.35723598435462844, "acc_stderr": 0.012238615750316508, "acc_norm": 0.35723598435462844, "acc_norm_stderr": 0.012238615750316508 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.4007352941176471, "acc_stderr": 0.029768263528933116, "acc_norm": 0.4007352941176471, "acc_norm_stderr": 0.029768263528933116 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.4624183006535948, "acc_stderr": 0.020170614974969768, "acc_norm": 0.4624183006535948, "acc_norm_stderr": 0.020170614974969768 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.5272727272727272, "acc_stderr": 0.04782001791380061, "acc_norm": 0.5272727272727272, "acc_norm_stderr": 0.04782001791380061 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.49795918367346936, "acc_stderr": 0.0320089533497105, "acc_norm": 0.49795918367346936, "acc_norm_stderr": 0.0320089533497105 }, "harness|hendrycksTest-sociology|5": { "acc": 0.6517412935323383, "acc_stderr": 0.033687874661154596, "acc_norm": 0.6517412935323383, "acc_norm_stderr": 0.033687874661154596 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.71, "acc_stderr": 0.045604802157206845, "acc_norm": 0.71, "acc_norm_stderr": 0.045604802157206845 }, "harness|hendrycksTest-virology|5": { "acc": 0.45180722891566266, "acc_stderr": 0.03874371556587953, "acc_norm": 0.45180722891566266, "acc_norm_stderr": 0.03874371556587953 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.6900584795321637, "acc_stderr": 0.035469769593931624, "acc_norm": 0.6900584795321637, "acc_norm_stderr": 0.035469769593931624 }, "harness|truthfulqa:mc|0": { "mc1": 0.2974296205630355, "mc1_stderr": 0.016002651487361002, "mc2": 0.45875295870930416, "mc2_stderr": 0.01569027399570652 }, "harness|winogrande|5": { "acc": 0.7071823204419889, "acc_stderr": 0.012789321118542618 }, "harness|gsm8k|5": { "acc": 0.17816527672479152, "acc_stderr": 0.01054013252754948 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). 
--> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
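Since the card explains that the aggregated metrics live in the "results" configuration and that a "latest" split always points at the newest evaluation, a short sketch of pulling only the aggregated numbers for this model (using the repository id from the card's own loading example) could look like this:

```python
from datasets import load_dataset

# "results" is the aggregated configuration described in the card above;
# the "latest" split points at the most recent evaluation run.
results = load_dataset(
    "open-llm-leaderboard/details_wang7776__Llama-2-7b-chat-hf-20-sparsity",
    "results",
    split="latest",
)
print(results)
```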
open-llm-leaderboard/details_wang7776__Llama-2-7b-chat-hf-20-sparsity
[ "region:us" ]
2023-12-16T13:58:27+00:00
{"pretty_name": "Evaluation run of wang7776/Llama-2-7b-chat-hf-20-sparsity", "dataset_summary": "Dataset automatically created during the evaluation run of model [wang7776/Llama-2-7b-chat-hf-20-sparsity](https://huggingface.co/wang7776/Llama-2-7b-chat-hf-20-sparsity) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_wang7776__Llama-2-7b-chat-hf-20-sparsity\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-12-16T13:55:29.096442](https://huggingface.co/datasets/open-llm-leaderboard/details_wang7776__Llama-2-7b-chat-hf-20-sparsity/blob/main/results_2023-12-16T13-55-29.096442.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.47402608498324605,\n \"acc_stderr\": 0.03438253640771256,\n \"acc_norm\": 0.4787691452187974,\n \"acc_norm_stderr\": 0.03513920510566655,\n \"mc1\": 0.2974296205630355,\n \"mc1_stderr\": 0.016002651487361002,\n \"mc2\": 0.45875295870930416,\n \"mc2_stderr\": 0.01569027399570652\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.5,\n \"acc_stderr\": 0.014611390804670088,\n \"acc_norm\": 0.5247440273037542,\n \"acc_norm_stderr\": 0.01459348769493774\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.5867357100179247,\n \"acc_stderr\": 0.004914130855431776,\n \"acc_norm\": 0.7791276638119896,\n \"acc_norm_stderr\": 0.0041398679751162995\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.29,\n \"acc_stderr\": 0.04560480215720683,\n \"acc_norm\": 0.29,\n \"acc_norm_stderr\": 0.04560480215720683\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.4148148148148148,\n \"acc_stderr\": 0.042561937679014075,\n \"acc_norm\": 0.4148148148148148,\n \"acc_norm_stderr\": 0.042561937679014075\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.5,\n \"acc_stderr\": 0.04068942293855797,\n \"acc_norm\": 0.5,\n \"acc_norm_stderr\": 0.04068942293855797\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.48,\n \"acc_stderr\": 0.050211673156867795,\n \"acc_norm\": 0.48,\n \"acc_norm_stderr\": 0.050211673156867795\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.5283018867924528,\n \"acc_stderr\": 0.030723535249006107,\n \"acc_norm\": 0.5283018867924528,\n \"acc_norm_stderr\": 0.030723535249006107\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.5,\n \"acc_stderr\": 0.04181210050035455,\n \"acc_norm\": 0.5,\n \"acc_norm_stderr\": 0.04181210050035455\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.32,\n \"acc_stderr\": 0.04688261722621504,\n \"acc_norm\": 0.32,\n 
\"acc_norm_stderr\": 0.04688261722621504\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.38,\n \"acc_stderr\": 0.048783173121456316,\n \"acc_norm\": 0.38,\n \"acc_norm_stderr\": 0.048783173121456316\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.33,\n \"acc_stderr\": 0.04725815626252605,\n \"acc_norm\": 0.33,\n \"acc_norm_stderr\": 0.04725815626252605\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.3872832369942196,\n \"acc_stderr\": 0.03714325906302065,\n \"acc_norm\": 0.3872832369942196,\n \"acc_norm_stderr\": 0.03714325906302065\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.20588235294117646,\n \"acc_stderr\": 0.04023382273617747,\n \"acc_norm\": 0.20588235294117646,\n \"acc_norm_stderr\": 0.04023382273617747\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.55,\n \"acc_stderr\": 0.05,\n \"acc_norm\": 0.55,\n \"acc_norm_stderr\": 0.05\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.3872340425531915,\n \"acc_stderr\": 0.03184389265339526,\n \"acc_norm\": 0.3872340425531915,\n \"acc_norm_stderr\": 0.03184389265339526\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.35964912280701755,\n \"acc_stderr\": 0.04514496132873634,\n \"acc_norm\": 0.35964912280701755,\n \"acc_norm_stderr\": 0.04514496132873634\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.4827586206896552,\n \"acc_stderr\": 0.04164188720169377,\n \"acc_norm\": 0.4827586206896552,\n \"acc_norm_stderr\": 0.04164188720169377\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.2751322751322751,\n \"acc_stderr\": 0.023000086859068652,\n \"acc_norm\": 0.2751322751322751,\n \"acc_norm_stderr\": 0.023000086859068652\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.25396825396825395,\n \"acc_stderr\": 0.038932596106046734,\n \"acc_norm\": 0.25396825396825395,\n \"acc_norm_stderr\": 0.038932596106046734\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.35,\n \"acc_stderr\": 0.04793724854411019,\n \"acc_norm\": 0.35,\n \"acc_norm_stderr\": 0.04793724854411019\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.5258064516129032,\n \"acc_stderr\": 0.02840609505765332,\n \"acc_norm\": 0.5258064516129032,\n \"acc_norm_stderr\": 0.02840609505765332\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.3497536945812808,\n \"acc_stderr\": 0.03355400904969566,\n \"acc_norm\": 0.3497536945812808,\n \"acc_norm_stderr\": 0.03355400904969566\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.4,\n \"acc_stderr\": 0.049236596391733084,\n \"acc_norm\": 0.4,\n \"acc_norm_stderr\": 0.049236596391733084\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.5818181818181818,\n \"acc_stderr\": 0.03851716319398395,\n \"acc_norm\": 0.5818181818181818,\n \"acc_norm_stderr\": 0.03851716319398395\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.5909090909090909,\n \"acc_stderr\": 0.03502975799413007,\n \"acc_norm\": 0.5909090909090909,\n \"acc_norm_stderr\": 0.03502975799413007\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.6787564766839378,\n \"acc_stderr\": 0.033699508685490674,\n \"acc_norm\": 0.6787564766839378,\n \"acc_norm_stderr\": 0.033699508685490674\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.39487179487179486,\n \"acc_stderr\": 
0.02478431694215637,\n \"acc_norm\": 0.39487179487179486,\n \"acc_norm_stderr\": 0.02478431694215637\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.26666666666666666,\n \"acc_stderr\": 0.026962424325073835,\n \"acc_norm\": 0.26666666666666666,\n \"acc_norm_stderr\": 0.026962424325073835\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.42016806722689076,\n \"acc_stderr\": 0.03206183783236152,\n \"acc_norm\": 0.42016806722689076,\n \"acc_norm_stderr\": 0.03206183783236152\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.304635761589404,\n \"acc_stderr\": 0.03757949922943343,\n \"acc_norm\": 0.304635761589404,\n \"acc_norm_stderr\": 0.03757949922943343\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.6477064220183486,\n \"acc_stderr\": 0.020480568843998986,\n \"acc_norm\": 0.6477064220183486,\n \"acc_norm_stderr\": 0.020480568843998986\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.30092592592592593,\n \"acc_stderr\": 0.03128039084329881,\n \"acc_norm\": 0.30092592592592593,\n \"acc_norm_stderr\": 0.03128039084329881\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.6519607843137255,\n \"acc_stderr\": 0.03343311240488419,\n \"acc_norm\": 0.6519607843137255,\n \"acc_norm_stderr\": 0.03343311240488419\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.6244725738396625,\n \"acc_stderr\": 0.03152256243091156,\n \"acc_norm\": 0.6244725738396625,\n \"acc_norm_stderr\": 0.03152256243091156\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.5695067264573991,\n \"acc_stderr\": 0.0332319730294294,\n \"acc_norm\": 0.5695067264573991,\n \"acc_norm_stderr\": 0.0332319730294294\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.5648854961832062,\n \"acc_stderr\": 0.043482080516448585,\n \"acc_norm\": 0.5648854961832062,\n \"acc_norm_stderr\": 0.043482080516448585\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.6115702479338843,\n \"acc_stderr\": 0.04449270350068382,\n \"acc_norm\": 0.6115702479338843,\n \"acc_norm_stderr\": 0.04449270350068382\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.5555555555555556,\n \"acc_stderr\": 0.04803752235190193,\n \"acc_norm\": 0.5555555555555556,\n \"acc_norm_stderr\": 0.04803752235190193\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.558282208588957,\n \"acc_stderr\": 0.03901591825836184,\n \"acc_norm\": 0.558282208588957,\n \"acc_norm_stderr\": 0.03901591825836184\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.35714285714285715,\n \"acc_stderr\": 0.04547960999764376,\n \"acc_norm\": 0.35714285714285715,\n \"acc_norm_stderr\": 0.04547960999764376\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.6601941747572816,\n \"acc_stderr\": 0.046897659372781335,\n \"acc_norm\": 0.6601941747572816,\n \"acc_norm_stderr\": 0.046897659372781335\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.7136752136752137,\n \"acc_stderr\": 0.02961432369045665,\n \"acc_norm\": 0.7136752136752137,\n \"acc_norm_stderr\": 0.02961432369045665\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.49,\n \"acc_stderr\": 0.05024183937956911,\n \"acc_norm\": 0.49,\n \"acc_norm_stderr\": 0.05024183937956911\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.6666666666666666,\n \"acc_stderr\": 0.01685739124747255,\n \"acc_norm\": 0.6666666666666666,\n \"acc_norm_stderr\": 
0.01685739124747255\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.49710982658959535,\n \"acc_stderr\": 0.02691864538323901,\n \"acc_norm\": 0.49710982658959535,\n \"acc_norm_stderr\": 0.02691864538323901\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.24804469273743016,\n \"acc_stderr\": 0.014444157808261467,\n \"acc_norm\": 0.24804469273743016,\n \"acc_norm_stderr\": 0.014444157808261467\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.4934640522875817,\n \"acc_stderr\": 0.028627470550556054,\n \"acc_norm\": 0.4934640522875817,\n \"acc_norm_stderr\": 0.028627470550556054\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.5627009646302251,\n \"acc_stderr\": 0.028173917761762892,\n \"acc_norm\": 0.5627009646302251,\n \"acc_norm_stderr\": 0.028173917761762892\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.5432098765432098,\n \"acc_stderr\": 0.02771666165019404,\n \"acc_norm\": 0.5432098765432098,\n \"acc_norm_stderr\": 0.02771666165019404\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.36879432624113473,\n \"acc_stderr\": 0.028782227561347243,\n \"acc_norm\": 0.36879432624113473,\n \"acc_norm_stderr\": 0.028782227561347243\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.35723598435462844,\n \"acc_stderr\": 0.012238615750316508,\n \"acc_norm\": 0.35723598435462844,\n \"acc_norm_stderr\": 0.012238615750316508\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.4007352941176471,\n \"acc_stderr\": 0.029768263528933116,\n \"acc_norm\": 0.4007352941176471,\n \"acc_norm_stderr\": 0.029768263528933116\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.4624183006535948,\n \"acc_stderr\": 0.020170614974969768,\n \"acc_norm\": 0.4624183006535948,\n \"acc_norm_stderr\": 0.020170614974969768\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.5272727272727272,\n \"acc_stderr\": 0.04782001791380061,\n \"acc_norm\": 0.5272727272727272,\n \"acc_norm_stderr\": 0.04782001791380061\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.49795918367346936,\n \"acc_stderr\": 0.0320089533497105,\n \"acc_norm\": 0.49795918367346936,\n \"acc_norm_stderr\": 0.0320089533497105\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.6517412935323383,\n \"acc_stderr\": 0.033687874661154596,\n \"acc_norm\": 0.6517412935323383,\n \"acc_norm_stderr\": 0.033687874661154596\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.71,\n \"acc_stderr\": 0.045604802157206845,\n \"acc_norm\": 0.71,\n \"acc_norm_stderr\": 0.045604802157206845\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.45180722891566266,\n \"acc_stderr\": 0.03874371556587953,\n \"acc_norm\": 0.45180722891566266,\n \"acc_norm_stderr\": 0.03874371556587953\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.6900584795321637,\n \"acc_stderr\": 0.035469769593931624,\n \"acc_norm\": 0.6900584795321637,\n \"acc_norm_stderr\": 0.035469769593931624\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.2974296205630355,\n \"mc1_stderr\": 0.016002651487361002,\n \"mc2\": 0.45875295870930416,\n \"mc2_stderr\": 0.01569027399570652\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7071823204419889,\n \"acc_stderr\": 0.012789321118542618\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.17816527672479152,\n \"acc_stderr\": 0.01054013252754948\n }\n}\n```", "repo_url": "https://huggingface.co/wang7776/Llama-2-7b-chat-hf-20-sparsity", 
"leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_12_16T13_55_29.096442", "path": ["**/details_harness|arc:challenge|25_2023-12-16T13-55-29.096442.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-12-16T13-55-29.096442.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_12_16T13_55_29.096442", "path": ["**/details_harness|gsm8k|5_2023-12-16T13-55-29.096442.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-12-16T13-55-29.096442.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_12_16T13_55_29.096442", "path": ["**/details_harness|hellaswag|10_2023-12-16T13-55-29.096442.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-12-16T13-55-29.096442.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_12_16T13_55_29.096442", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T13-55-29.096442.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-12-16T13-55-29.096442.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-12-16T13-55-29.096442.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T13-55-29.096442.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T13-55-29.096442.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-12-16T13-55-29.096442.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T13-55-29.096442.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T13-55-29.096442.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T13-55-29.096442.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T13-55-29.096442.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-12-16T13-55-29.096442.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-12-16T13-55-29.096442.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T13-55-29.096442.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-12-16T13-55-29.096442.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T13-55-29.096442.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T13-55-29.096442.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T13-55-29.096442.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-12-16T13-55-29.096442.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T13-55-29.096442.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T13-55-29.096442.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T13-55-29.096442.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T13-55-29.096442.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T13-55-29.096442.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T13-55-29.096442.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T13-55-29.096442.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T13-55-29.096442.parquet", 
"**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T13-55-29.096442.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T13-55-29.096442.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T13-55-29.096442.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T13-55-29.096442.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T13-55-29.096442.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T13-55-29.096442.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-12-16T13-55-29.096442.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T13-55-29.096442.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-12-16T13-55-29.096442.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T13-55-29.096442.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T13-55-29.096442.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T13-55-29.096442.parquet", "**/details_harness|hendrycksTest-management|5_2023-12-16T13-55-29.096442.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-12-16T13-55-29.096442.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T13-55-29.096442.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T13-55-29.096442.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T13-55-29.096442.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T13-55-29.096442.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-12-16T13-55-29.096442.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-12-16T13-55-29.096442.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-12-16T13-55-29.096442.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T13-55-29.096442.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-12-16T13-55-29.096442.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T13-55-29.096442.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T13-55-29.096442.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-12-16T13-55-29.096442.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-12-16T13-55-29.096442.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-12-16T13-55-29.096442.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T13-55-29.096442.parquet", "**/details_harness|hendrycksTest-virology|5_2023-12-16T13-55-29.096442.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-12-16T13-55-29.096442.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T13-55-29.096442.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-12-16T13-55-29.096442.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-12-16T13-55-29.096442.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T13-55-29.096442.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T13-55-29.096442.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-12-16T13-55-29.096442.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T13-55-29.096442.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T13-55-29.096442.parquet", 
"**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T13-55-29.096442.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T13-55-29.096442.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-12-16T13-55-29.096442.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-12-16T13-55-29.096442.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T13-55-29.096442.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-12-16T13-55-29.096442.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T13-55-29.096442.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T13-55-29.096442.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T13-55-29.096442.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-12-16T13-55-29.096442.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T13-55-29.096442.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T13-55-29.096442.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T13-55-29.096442.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T13-55-29.096442.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T13-55-29.096442.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T13-55-29.096442.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T13-55-29.096442.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T13-55-29.096442.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T13-55-29.096442.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T13-55-29.096442.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T13-55-29.096442.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T13-55-29.096442.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T13-55-29.096442.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T13-55-29.096442.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-12-16T13-55-29.096442.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T13-55-29.096442.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-12-16T13-55-29.096442.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T13-55-29.096442.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T13-55-29.096442.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T13-55-29.096442.parquet", "**/details_harness|hendrycksTest-management|5_2023-12-16T13-55-29.096442.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-12-16T13-55-29.096442.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T13-55-29.096442.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T13-55-29.096442.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T13-55-29.096442.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T13-55-29.096442.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-12-16T13-55-29.096442.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-12-16T13-55-29.096442.parquet", 
"**/details_harness|hendrycksTest-prehistory|5_2023-12-16T13-55-29.096442.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T13-55-29.096442.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-12-16T13-55-29.096442.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T13-55-29.096442.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T13-55-29.096442.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-12-16T13-55-29.096442.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-12-16T13-55-29.096442.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-12-16T13-55-29.096442.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T13-55-29.096442.parquet", "**/details_harness|hendrycksTest-virology|5_2023-12-16T13-55-29.096442.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-12-16T13-55-29.096442.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_12_16T13_55_29.096442", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T13-55-29.096442.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T13-55-29.096442.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_12_16T13_55_29.096442", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-12-16T13-55-29.096442.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-12-16T13-55-29.096442.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_12_16T13_55_29.096442", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-12-16T13-55-29.096442.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-12-16T13-55-29.096442.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_12_16T13_55_29.096442", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T13-55-29.096442.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T13-55-29.096442.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_12_16T13_55_29.096442", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T13-55-29.096442.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T13-55-29.096442.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_12_16T13_55_29.096442", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-12-16T13-55-29.096442.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-12-16T13-55-29.096442.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_12_16T13_55_29.096442", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T13-55-29.096442.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T13-55-29.096442.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_12_16T13_55_29.096442", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T13-55-29.096442.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T13-55-29.096442.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_12_16T13_55_29.096442", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T13-55-29.096442.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T13-55-29.096442.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_12_16T13_55_29.096442", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T13-55-29.096442.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T13-55-29.096442.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_12_16T13_55_29.096442", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-12-16T13-55-29.096442.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-12-16T13-55-29.096442.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_12_16T13_55_29.096442", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-12-16T13-55-29.096442.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-12-16T13-55-29.096442.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_12_16T13_55_29.096442", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T13-55-29.096442.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T13-55-29.096442.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_12_16T13_55_29.096442", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-12-16T13-55-29.096442.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-12-16T13-55-29.096442.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_12_16T13_55_29.096442", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T13-55-29.096442.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T13-55-29.096442.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_12_16T13_55_29.096442", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T13-55-29.096442.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T13-55-29.096442.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_12_16T13_55_29.096442", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T13-55-29.096442.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T13-55-29.096442.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_12_16T13_55_29.096442", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-12-16T13-55-29.096442.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-12-16T13-55-29.096442.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_12_16T13_55_29.096442", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T13-55-29.096442.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T13-55-29.096442.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_12_16T13_55_29.096442", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T13-55-29.096442.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T13-55-29.096442.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_12_16T13_55_29.096442", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T13-55-29.096442.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T13-55-29.096442.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_12_16T13_55_29.096442", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T13-55-29.096442.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T13-55-29.096442.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_12_16T13_55_29.096442", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T13-55-29.096442.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T13-55-29.096442.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_12_16T13_55_29.096442", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T13-55-29.096442.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T13-55-29.096442.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_12_16T13_55_29.096442", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T13-55-29.096442.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T13-55-29.096442.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_12_16T13_55_29.096442", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T13-55-29.096442.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T13-55-29.096442.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_12_16T13_55_29.096442", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T13-55-29.096442.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T13-55-29.096442.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_12_16T13_55_29.096442", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T13-55-29.096442.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T13-55-29.096442.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_12_16T13_55_29.096442", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T13-55-29.096442.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T13-55-29.096442.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_12_16T13_55_29.096442", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T13-55-29.096442.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T13-55-29.096442.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_12_16T13_55_29.096442", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T13-55-29.096442.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T13-55-29.096442.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_12_16T13_55_29.096442", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T13-55-29.096442.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T13-55-29.096442.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_12_16T13_55_29.096442", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-12-16T13-55-29.096442.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-12-16T13-55-29.096442.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_12_16T13_55_29.096442", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T13-55-29.096442.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T13-55-29.096442.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_12_16T13_55_29.096442", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-12-16T13-55-29.096442.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-12-16T13-55-29.096442.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_12_16T13_55_29.096442", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T13-55-29.096442.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T13-55-29.096442.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_12_16T13_55_29.096442", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T13-55-29.096442.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T13-55-29.096442.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_12_16T13_55_29.096442", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T13-55-29.096442.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T13-55-29.096442.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_12_16T13_55_29.096442", "path": ["**/details_harness|hendrycksTest-management|5_2023-12-16T13-55-29.096442.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-12-16T13-55-29.096442.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_12_16T13_55_29.096442", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-12-16T13-55-29.096442.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-12-16T13-55-29.096442.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_12_16T13_55_29.096442", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T13-55-29.096442.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T13-55-29.096442.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_12_16T13_55_29.096442", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T13-55-29.096442.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T13-55-29.096442.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_12_16T13_55_29.096442", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T13-55-29.096442.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T13-55-29.096442.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_12_16T13_55_29.096442", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T13-55-29.096442.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T13-55-29.096442.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_12_16T13_55_29.096442", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-12-16T13-55-29.096442.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-12-16T13-55-29.096442.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_12_16T13_55_29.096442", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-12-16T13-55-29.096442.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-12-16T13-55-29.096442.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_12_16T13_55_29.096442", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-12-16T13-55-29.096442.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-12-16T13-55-29.096442.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_12_16T13_55_29.096442", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T13-55-29.096442.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T13-55-29.096442.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_12_16T13_55_29.096442", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-12-16T13-55-29.096442.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2023-12-16T13-55-29.096442.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_12_16T13_55_29.096442", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T13-55-29.096442.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T13-55-29.096442.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_12_16T13_55_29.096442", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T13-55-29.096442.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T13-55-29.096442.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_12_16T13_55_29.096442", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-12-16T13-55-29.096442.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-12-16T13-55-29.096442.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_12_16T13_55_29.096442", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-12-16T13-55-29.096442.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-12-16T13-55-29.096442.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_12_16T13_55_29.096442", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-12-16T13-55-29.096442.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-12-16T13-55-29.096442.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_12_16T13_55_29.096442", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T13-55-29.096442.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T13-55-29.096442.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_12_16T13_55_29.096442", "path": ["**/details_harness|hendrycksTest-virology|5_2023-12-16T13-55-29.096442.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-12-16T13-55-29.096442.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_12_16T13_55_29.096442", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-12-16T13-55-29.096442.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-12-16T13-55-29.096442.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_12_16T13_55_29.096442", "path": ["**/details_harness|truthfulqa:mc|0_2023-12-16T13-55-29.096442.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-12-16T13-55-29.096442.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_12_16T13_55_29.096442", "path": ["**/details_harness|winogrande|5_2023-12-16T13-55-29.096442.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-12-16T13-55-29.096442.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_12_16T13_55_29.096442", "path": ["results_2023-12-16T13-55-29.096442.parquet"]}, {"split": "latest", "path": 
["results_2023-12-16T13-55-29.096442.parquet"]}]}]}
2023-12-16T13:59:08+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of wang7776/Llama-2-7b-chat-hf-20-sparsity Dataset automatically created during the evaluation run of model wang7776/Llama-2-7b-chat-hf-20-sparsity on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-12-16T13:55:29.096442 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
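The loading snippet that the card above refers to ("do the following:") follows the same pattern as the other evaluation-run cards in this collection; a minimal sketch, assuming the details repository follows the usual `open-llm-leaderboard/details_<org>__<model>` naming convention (the exact repository id is not stated in this entry):

```python
from datasets import load_dataset

# Assumed repository id, derived from the details_<org>__<model> convention
data = load_dataset(
    "open-llm-leaderboard/details_wang7776__Llama-2-7b-chat-hf-20-sparsity",
    "harness_winogrande_5",  # any config_name from the configs list above works here
    split="train",           # "train" tracks the latest run; each config also defines a "latest" split
)
```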
[ "# Dataset Card for Evaluation run of wang7776/Llama-2-7b-chat-hf-20-sparsity\n\n\n\nDataset automatically created during the evaluation run of model wang7776/Llama-2-7b-chat-hf-20-sparsity on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-12-16T13:55:29.096442(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of wang7776/Llama-2-7b-chat-hf-20-sparsity\n\n\n\nDataset automatically created during the evaluation run of model wang7776/Llama-2-7b-chat-hf-20-sparsity on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-12-16T13:55:29.096442(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 199, 67, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of wang7776/Llama-2-7b-chat-hf-20-sparsity\n\n\n\nDataset automatically created during the evaluation run of model wang7776/Llama-2-7b-chat-hf-20-sparsity on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-12-16T13:55:29.096442(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]" ]
faed3f55b9cfa6408e79d7a6f64748b757c7b997
# Dataset Card for "alpaca_format2" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
harshithvh/alpaca_format2
[ "region:us" ]
2023-12-16T14:07:43+00:00
{"dataset_info": {"features": [{"name": "text", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 582263, "num_examples": 100}], "download_size": 169676, "dataset_size": 582263}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]}
2023-12-16T14:08:05+00:00
[]
[]
TAGS #region-us
# Dataset Card for "alpaca_format2" More Information needed
[ "# Dataset Card for \"alpaca_format2\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"alpaca_format2\"\n\nMore Information needed" ]
[ 6, 15 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"alpaca_format2\"\n\nMore Information needed" ]
1b97cbb3f7c4a67c270553dec995a246012eddcf
# Dataset Card for Evaluation run of luffycodes/llama-class-shishya-7b-ep3 <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [luffycodes/llama-class-shishya-7b-ep3](https://huggingface.co/luffycodes/llama-class-shishya-7b-ep3) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_luffycodes__llama-class-shishya-7b-ep3", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-12-16T14:18:29.787522](https://huggingface.co/datasets/open-llm-leaderboard/details_luffycodes__llama-class-shishya-7b-ep3/blob/main/results_2023-12-16T14-18-29.787522.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.46443607121383107, "acc_stderr": 0.03429100219375034, "acc_norm": 0.4715546923049019, "acc_norm_stderr": 0.03522731903306309, "mc1": 0.1835985312117503, "mc1_stderr": 0.013553186376880936, "mc2": 0.27936842078964386, "mc2_stderr": 0.013103829305712317 }, "harness|arc:challenge|25": { "acc": 0.3848122866894198, "acc_stderr": 0.014218371065251098, "acc_norm": 0.40784982935153585, "acc_norm_stderr": 0.014361097288449701 }, "harness|hellaswag|10": { "acc": 0.5943039235212109, "acc_stderr": 0.004900227226433388, "acc_norm": 0.7703644692292372, "acc_norm_stderr": 0.0041973886269400665 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.27, "acc_stderr": 0.044619604333847415, "acc_norm": 0.27, "acc_norm_stderr": 0.044619604333847415 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.45925925925925926, "acc_stderr": 0.04304979692464243, "acc_norm": 0.45925925925925926, "acc_norm_stderr": 0.04304979692464243 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.5, "acc_stderr": 0.04068942293855797, "acc_norm": 0.5, "acc_norm_stderr": 0.04068942293855797 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.46, "acc_stderr": 0.05009082659620332, "acc_norm": 0.46, "acc_norm_stderr": 0.05009082659620332 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.5471698113207547, "acc_stderr": 0.030635627957961816, "acc_norm": 0.5471698113207547, "acc_norm_stderr": 0.030635627957961816 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.4930555555555556, "acc_stderr": 0.04180806750294938, "acc_norm": 0.4930555555555556, "acc_norm_stderr": 0.04180806750294938 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.46, "acc_stderr": 0.05009082659620332, "acc_norm": 0.46, "acc_norm_stderr": 0.05009082659620332 },
"harness|hendrycksTest-college_mathematics|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.41040462427745666, "acc_stderr": 0.03750757044895537, "acc_norm": 0.41040462427745666, "acc_norm_stderr": 0.03750757044895537 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.24509803921568626, "acc_stderr": 0.04280105837364396, "acc_norm": 0.24509803921568626, "acc_norm_stderr": 0.04280105837364396 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.54, "acc_stderr": 0.05009082659620332, "acc_norm": 0.54, "acc_norm_stderr": 0.05009082659620332 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.40425531914893614, "acc_stderr": 0.03208115750788684, "acc_norm": 0.40425531914893614, "acc_norm_stderr": 0.03208115750788684 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.34210526315789475, "acc_stderr": 0.04462917535336936, "acc_norm": 0.34210526315789475, "acc_norm_stderr": 0.04462917535336936 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.41379310344827586, "acc_stderr": 0.04104269211806232, "acc_norm": 0.41379310344827586, "acc_norm_stderr": 0.04104269211806232 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.2962962962962963, "acc_stderr": 0.023517294335963286, "acc_norm": 0.2962962962962963, "acc_norm_stderr": 0.023517294335963286 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.24603174603174602, "acc_stderr": 0.038522733649243156, "acc_norm": 0.24603174603174602, "acc_norm_stderr": 0.038522733649243156 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.5193548387096775, "acc_stderr": 0.028422687404312107, "acc_norm": 0.5193548387096775, "acc_norm_stderr": 0.028422687404312107 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.3793103448275862, "acc_stderr": 0.03413963805906235, "acc_norm": 0.3793103448275862, "acc_norm_stderr": 0.03413963805906235 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.39, "acc_stderr": 0.04902071300001974, "acc_norm": 0.39, "acc_norm_stderr": 0.04902071300001974 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.593939393939394, "acc_stderr": 0.03834816355401181, "acc_norm": 0.593939393939394, "acc_norm_stderr": 0.03834816355401181 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.5808080808080808, "acc_stderr": 0.03515520728670417, "acc_norm": 0.5808080808080808, "acc_norm_stderr": 0.03515520728670417 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.6476683937823834, "acc_stderr": 0.03447478286414357, "acc_norm": 0.6476683937823834, "acc_norm_stderr": 0.03447478286414357 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.40512820512820513, "acc_stderr": 0.024890471769938145, "acc_norm": 0.40512820512820513, "acc_norm_stderr": 0.024890471769938145 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.2518518518518518, "acc_stderr": 0.026466117538959916, "acc_norm": 0.2518518518518518, "acc_norm_stderr": 0.026466117538959916 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.42016806722689076, "acc_stderr": 0.03206183783236152, "acc_norm": 0.42016806722689076, "acc_norm_stderr": 0.03206183783236152 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.32450331125827814, "acc_stderr": 
0.03822746937658753, "acc_norm": 0.32450331125827814, "acc_norm_stderr": 0.03822746937658753 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.6862385321100918, "acc_stderr": 0.019894723341469127, "acc_norm": 0.6862385321100918, "acc_norm_stderr": 0.019894723341469127 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.32407407407407407, "acc_stderr": 0.03191923445686186, "acc_norm": 0.32407407407407407, "acc_norm_stderr": 0.03191923445686186 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.6127450980392157, "acc_stderr": 0.03418931233833344, "acc_norm": 0.6127450980392157, "acc_norm_stderr": 0.03418931233833344 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.6329113924050633, "acc_stderr": 0.03137624072561618, "acc_norm": 0.6329113924050633, "acc_norm_stderr": 0.03137624072561618 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.5650224215246636, "acc_stderr": 0.033272833702713445, "acc_norm": 0.5650224215246636, "acc_norm_stderr": 0.033272833702713445 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.549618320610687, "acc_stderr": 0.04363643698524779, "acc_norm": 0.549618320610687, "acc_norm_stderr": 0.04363643698524779 }, "harness|hendrycksTest-international_law|5": { "acc": 0.6446280991735537, "acc_stderr": 0.0436923632657398, "acc_norm": 0.6446280991735537, "acc_norm_stderr": 0.0436923632657398 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.5370370370370371, "acc_stderr": 0.04820403072760628, "acc_norm": 0.5370370370370371, "acc_norm_stderr": 0.04820403072760628 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.5276073619631901, "acc_stderr": 0.0392237829061099, "acc_norm": 0.5276073619631901, "acc_norm_stderr": 0.0392237829061099 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.33035714285714285, "acc_stderr": 0.04464285714285714, "acc_norm": 0.33035714285714285, "acc_norm_stderr": 0.04464285714285714 }, "harness|hendrycksTest-management|5": { "acc": 0.6407766990291263, "acc_stderr": 0.047504583990416946, "acc_norm": 0.6407766990291263, "acc_norm_stderr": 0.047504583990416946 }, "harness|hendrycksTest-marketing|5": { "acc": 0.6965811965811965, "acc_stderr": 0.030118210106942638, "acc_norm": 0.6965811965811965, "acc_norm_stderr": 0.030118210106942638 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.47, "acc_stderr": 0.050161355804659205, "acc_norm": 0.47, "acc_norm_stderr": 0.050161355804659205 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.6717752234993615, "acc_stderr": 0.01679168564019289, "acc_norm": 0.6717752234993615, "acc_norm_stderr": 0.01679168564019289 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.4913294797687861, "acc_stderr": 0.026915047355369804, "acc_norm": 0.4913294797687861, "acc_norm_stderr": 0.026915047355369804 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.24581005586592178, "acc_stderr": 0.014400296429225589, "acc_norm": 0.24581005586592178, "acc_norm_stderr": 0.014400296429225589 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.46405228758169936, "acc_stderr": 0.028555827516528777, "acc_norm": 0.46405228758169936, "acc_norm_stderr": 0.028555827516528777 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.572347266881029, "acc_stderr": 0.02809924077580956, "acc_norm": 0.572347266881029, "acc_norm_stderr": 0.02809924077580956 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.5462962962962963, "acc_stderr": 0.027701228468542595, "acc_norm": 0.5462962962962963, "acc_norm_stderr": 0.027701228468542595 }, 
"harness|hendrycksTest-professional_accounting|5": { "acc": 0.3404255319148936, "acc_stderr": 0.02826765748265015, "acc_norm": 0.3404255319148936, "acc_norm_stderr": 0.02826765748265015 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.3324641460234681, "acc_stderr": 0.012032022332260514, "acc_norm": 0.3324641460234681, "acc_norm_stderr": 0.012032022332260514 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.4338235294117647, "acc_stderr": 0.03010563657001664, "acc_norm": 0.4338235294117647, "acc_norm_stderr": 0.03010563657001664 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.4493464052287582, "acc_stderr": 0.020123766528027266, "acc_norm": 0.4493464052287582, "acc_norm_stderr": 0.020123766528027266 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.5363636363636364, "acc_stderr": 0.04776449162396197, "acc_norm": 0.5363636363636364, "acc_norm_stderr": 0.04776449162396197 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.4857142857142857, "acc_stderr": 0.03199615232806287, "acc_norm": 0.4857142857142857, "acc_norm_stderr": 0.03199615232806287 }, "harness|hendrycksTest-sociology|5": { "acc": 0.6119402985074627, "acc_stderr": 0.0344578996436275, "acc_norm": 0.6119402985074627, "acc_norm_stderr": 0.0344578996436275 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.62, "acc_stderr": 0.04878317312145632, "acc_norm": 0.62, "acc_norm_stderr": 0.04878317312145632 }, "harness|hendrycksTest-virology|5": { "acc": 0.41566265060240964, "acc_stderr": 0.038367221765980515, "acc_norm": 0.41566265060240964, "acc_norm_stderr": 0.038367221765980515 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.6783625730994152, "acc_stderr": 0.03582529442573122, "acc_norm": 0.6783625730994152, "acc_norm_stderr": 0.03582529442573122 }, "harness|truthfulqa:mc|0": { "mc1": 0.1835985312117503, "mc1_stderr": 0.013553186376880936, "mc2": 0.27936842078964386, "mc2_stderr": 0.013103829305712317 }, "harness|winogrande|5": { "acc": 0.7079715864246251, "acc_stderr": 0.01277919849175402 }, "harness|gsm8k|5": { "acc": 0.0, "acc_stderr": 0.0 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). 
--> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
open-llm-leaderboard/details_luffycodes__llama-class-shishya-7b-ep3
[ "region:us" ]
2023-12-16T14:21:25+00:00
{"pretty_name": "Evaluation run of luffycodes/llama-class-shishya-7b-ep3", "dataset_summary": "Dataset automatically created during the evaluation run of model [luffycodes/llama-class-shishya-7b-ep3](https://huggingface.co/luffycodes/llama-class-shishya-7b-ep3) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_luffycodes__llama-class-shishya-7b-ep3\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-12-16T14:18:29.787522](https://huggingface.co/datasets/open-llm-leaderboard/details_luffycodes__llama-class-shishya-7b-ep3/blob/main/results_2023-12-16T14-18-29.787522.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.46443607121383107,\n \"acc_stderr\": 0.03429100219375034,\n \"acc_norm\": 0.4715546923049019,\n \"acc_norm_stderr\": 0.03522731903306309,\n \"mc1\": 0.1835985312117503,\n \"mc1_stderr\": 0.013553186376880936,\n \"mc2\": 0.27936842078964386,\n \"mc2_stderr\": 0.013103829305712317\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.3848122866894198,\n \"acc_stderr\": 0.014218371065251098,\n \"acc_norm\": 0.40784982935153585,\n \"acc_norm_stderr\": 0.014361097288449701\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.5943039235212109,\n \"acc_stderr\": 0.004900227226433388,\n \"acc_norm\": 0.7703644692292372,\n \"acc_norm_stderr\": 0.0041973886269400665\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.27,\n \"acc_stderr\": 0.044619604333847415,\n \"acc_norm\": 0.27,\n \"acc_norm_stderr\": 0.044619604333847415\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.45925925925925926,\n \"acc_stderr\": 0.04304979692464243,\n \"acc_norm\": 0.45925925925925926,\n \"acc_norm_stderr\": 0.04304979692464243\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.5,\n \"acc_stderr\": 0.04068942293855797,\n \"acc_norm\": 0.5,\n \"acc_norm_stderr\": 0.04068942293855797\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.46,\n \"acc_stderr\": 0.05009082659620332,\n \"acc_norm\": 0.46,\n \"acc_norm_stderr\": 0.05009082659620332\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.5471698113207547,\n \"acc_stderr\": 0.030635627957961816,\n \"acc_norm\": 0.5471698113207547,\n \"acc_norm_stderr\": 0.030635627957961816\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.4930555555555556,\n \"acc_stderr\": 0.04180806750294938,\n \"acc_norm\": 0.4930555555555556,\n \"acc_norm_stderr\": 0.04180806750294938\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.31,\n \"acc_stderr\": 
0.04648231987117316,\n \"acc_norm\": 0.31,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.46,\n \"acc_stderr\": 0.05009082659620332,\n \"acc_norm\": 0.46,\n \"acc_norm_stderr\": 0.05009082659620332\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.31,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.31,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.41040462427745666,\n \"acc_stderr\": 0.03750757044895537,\n \"acc_norm\": 0.41040462427745666,\n \"acc_norm_stderr\": 0.03750757044895537\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.24509803921568626,\n \"acc_stderr\": 0.04280105837364396,\n \"acc_norm\": 0.24509803921568626,\n \"acc_norm_stderr\": 0.04280105837364396\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.54,\n \"acc_stderr\": 0.05009082659620332,\n \"acc_norm\": 0.54,\n \"acc_norm_stderr\": 0.05009082659620332\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.40425531914893614,\n \"acc_stderr\": 0.03208115750788684,\n \"acc_norm\": 0.40425531914893614,\n \"acc_norm_stderr\": 0.03208115750788684\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.34210526315789475,\n \"acc_stderr\": 0.04462917535336936,\n \"acc_norm\": 0.34210526315789475,\n \"acc_norm_stderr\": 0.04462917535336936\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.41379310344827586,\n \"acc_stderr\": 0.04104269211806232,\n \"acc_norm\": 0.41379310344827586,\n \"acc_norm_stderr\": 0.04104269211806232\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.2962962962962963,\n \"acc_stderr\": 0.023517294335963286,\n \"acc_norm\": 0.2962962962962963,\n \"acc_norm_stderr\": 0.023517294335963286\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.24603174603174602,\n \"acc_stderr\": 0.038522733649243156,\n \"acc_norm\": 0.24603174603174602,\n \"acc_norm_stderr\": 0.038522733649243156\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.31,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.31,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.5193548387096775,\n \"acc_stderr\": 0.028422687404312107,\n \"acc_norm\": 0.5193548387096775,\n \"acc_norm_stderr\": 0.028422687404312107\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.3793103448275862,\n \"acc_stderr\": 0.03413963805906235,\n \"acc_norm\": 0.3793103448275862,\n \"acc_norm_stderr\": 0.03413963805906235\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.39,\n \"acc_stderr\": 0.04902071300001974,\n \"acc_norm\": 0.39,\n \"acc_norm_stderr\": 0.04902071300001974\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.593939393939394,\n \"acc_stderr\": 0.03834816355401181,\n \"acc_norm\": 0.593939393939394,\n \"acc_norm_stderr\": 0.03834816355401181\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.5808080808080808,\n \"acc_stderr\": 0.03515520728670417,\n \"acc_norm\": 0.5808080808080808,\n \"acc_norm_stderr\": 0.03515520728670417\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.6476683937823834,\n \"acc_stderr\": 0.03447478286414357,\n \"acc_norm\": 0.6476683937823834,\n \"acc_norm_stderr\": 0.03447478286414357\n },\n 
\"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.40512820512820513,\n \"acc_stderr\": 0.024890471769938145,\n \"acc_norm\": 0.40512820512820513,\n \"acc_norm_stderr\": 0.024890471769938145\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.2518518518518518,\n \"acc_stderr\": 0.026466117538959916,\n \"acc_norm\": 0.2518518518518518,\n \"acc_norm_stderr\": 0.026466117538959916\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.42016806722689076,\n \"acc_stderr\": 0.03206183783236152,\n \"acc_norm\": 0.42016806722689076,\n \"acc_norm_stderr\": 0.03206183783236152\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.32450331125827814,\n \"acc_stderr\": 0.03822746937658753,\n \"acc_norm\": 0.32450331125827814,\n \"acc_norm_stderr\": 0.03822746937658753\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.6862385321100918,\n \"acc_stderr\": 0.019894723341469127,\n \"acc_norm\": 0.6862385321100918,\n \"acc_norm_stderr\": 0.019894723341469127\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.32407407407407407,\n \"acc_stderr\": 0.03191923445686186,\n \"acc_norm\": 0.32407407407407407,\n \"acc_norm_stderr\": 0.03191923445686186\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.6127450980392157,\n \"acc_stderr\": 0.03418931233833344,\n \"acc_norm\": 0.6127450980392157,\n \"acc_norm_stderr\": 0.03418931233833344\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.6329113924050633,\n \"acc_stderr\": 0.03137624072561618,\n \"acc_norm\": 0.6329113924050633,\n \"acc_norm_stderr\": 0.03137624072561618\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.5650224215246636,\n \"acc_stderr\": 0.033272833702713445,\n \"acc_norm\": 0.5650224215246636,\n \"acc_norm_stderr\": 0.033272833702713445\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.549618320610687,\n \"acc_stderr\": 0.04363643698524779,\n \"acc_norm\": 0.549618320610687,\n \"acc_norm_stderr\": 0.04363643698524779\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.6446280991735537,\n \"acc_stderr\": 0.0436923632657398,\n \"acc_norm\": 0.6446280991735537,\n \"acc_norm_stderr\": 0.0436923632657398\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.5370370370370371,\n \"acc_stderr\": 0.04820403072760628,\n \"acc_norm\": 0.5370370370370371,\n \"acc_norm_stderr\": 0.04820403072760628\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.5276073619631901,\n \"acc_stderr\": 0.0392237829061099,\n \"acc_norm\": 0.5276073619631901,\n \"acc_norm_stderr\": 0.0392237829061099\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.33035714285714285,\n \"acc_stderr\": 0.04464285714285714,\n \"acc_norm\": 0.33035714285714285,\n \"acc_norm_stderr\": 0.04464285714285714\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.6407766990291263,\n \"acc_stderr\": 0.047504583990416946,\n \"acc_norm\": 0.6407766990291263,\n \"acc_norm_stderr\": 0.047504583990416946\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.6965811965811965,\n \"acc_stderr\": 0.030118210106942638,\n \"acc_norm\": 0.6965811965811965,\n \"acc_norm_stderr\": 0.030118210106942638\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.47,\n \"acc_stderr\": 0.050161355804659205,\n \"acc_norm\": 0.47,\n \"acc_norm_stderr\": 0.050161355804659205\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 
0.6717752234993615,\n \"acc_stderr\": 0.01679168564019289,\n \"acc_norm\": 0.6717752234993615,\n \"acc_norm_stderr\": 0.01679168564019289\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.4913294797687861,\n \"acc_stderr\": 0.026915047355369804,\n \"acc_norm\": 0.4913294797687861,\n \"acc_norm_stderr\": 0.026915047355369804\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.24581005586592178,\n \"acc_stderr\": 0.014400296429225589,\n \"acc_norm\": 0.24581005586592178,\n \"acc_norm_stderr\": 0.014400296429225589\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.46405228758169936,\n \"acc_stderr\": 0.028555827516528777,\n \"acc_norm\": 0.46405228758169936,\n \"acc_norm_stderr\": 0.028555827516528777\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.572347266881029,\n \"acc_stderr\": 0.02809924077580956,\n \"acc_norm\": 0.572347266881029,\n \"acc_norm_stderr\": 0.02809924077580956\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.5462962962962963,\n \"acc_stderr\": 0.027701228468542595,\n \"acc_norm\": 0.5462962962962963,\n \"acc_norm_stderr\": 0.027701228468542595\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.3404255319148936,\n \"acc_stderr\": 0.02826765748265015,\n \"acc_norm\": 0.3404255319148936,\n \"acc_norm_stderr\": 0.02826765748265015\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.3324641460234681,\n \"acc_stderr\": 0.012032022332260514,\n \"acc_norm\": 0.3324641460234681,\n \"acc_norm_stderr\": 0.012032022332260514\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.4338235294117647,\n \"acc_stderr\": 0.03010563657001664,\n \"acc_norm\": 0.4338235294117647,\n \"acc_norm_stderr\": 0.03010563657001664\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.4493464052287582,\n \"acc_stderr\": 0.020123766528027266,\n \"acc_norm\": 0.4493464052287582,\n \"acc_norm_stderr\": 0.020123766528027266\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.5363636363636364,\n \"acc_stderr\": 0.04776449162396197,\n \"acc_norm\": 0.5363636363636364,\n \"acc_norm_stderr\": 0.04776449162396197\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.4857142857142857,\n \"acc_stderr\": 0.03199615232806287,\n \"acc_norm\": 0.4857142857142857,\n \"acc_norm_stderr\": 0.03199615232806287\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.6119402985074627,\n \"acc_stderr\": 0.0344578996436275,\n \"acc_norm\": 0.6119402985074627,\n \"acc_norm_stderr\": 0.0344578996436275\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.62,\n \"acc_stderr\": 0.04878317312145632,\n \"acc_norm\": 0.62,\n \"acc_norm_stderr\": 0.04878317312145632\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.41566265060240964,\n \"acc_stderr\": 0.038367221765980515,\n \"acc_norm\": 0.41566265060240964,\n \"acc_norm_stderr\": 0.038367221765980515\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.6783625730994152,\n \"acc_stderr\": 0.03582529442573122,\n \"acc_norm\": 0.6783625730994152,\n \"acc_norm_stderr\": 0.03582529442573122\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.1835985312117503,\n \"mc1_stderr\": 0.013553186376880936,\n \"mc2\": 0.27936842078964386,\n \"mc2_stderr\": 0.013103829305712317\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7079715864246251,\n \"acc_stderr\": 0.01277919849175402\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.0,\n \"acc_stderr\": 0.0\n }\n}\n```", "repo_url": 
"https://huggingface.co/luffycodes/llama-class-shishya-7b-ep3", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_12_16T14_18_29.787522", "path": ["**/details_harness|arc:challenge|25_2023-12-16T14-18-29.787522.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-12-16T14-18-29.787522.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_12_16T14_18_29.787522", "path": ["**/details_harness|gsm8k|5_2023-12-16T14-18-29.787522.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-12-16T14-18-29.787522.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_12_16T14_18_29.787522", "path": ["**/details_harness|hellaswag|10_2023-12-16T14-18-29.787522.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-12-16T14-18-29.787522.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_12_16T14_18_29.787522", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T14-18-29.787522.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-12-16T14-18-29.787522.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-12-16T14-18-29.787522.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T14-18-29.787522.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T14-18-29.787522.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-12-16T14-18-29.787522.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T14-18-29.787522.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T14-18-29.787522.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T14-18-29.787522.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T14-18-29.787522.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-12-16T14-18-29.787522.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-12-16T14-18-29.787522.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T14-18-29.787522.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-12-16T14-18-29.787522.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T14-18-29.787522.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T14-18-29.787522.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T14-18-29.787522.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-12-16T14-18-29.787522.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T14-18-29.787522.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T14-18-29.787522.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T14-18-29.787522.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T14-18-29.787522.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T14-18-29.787522.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T14-18-29.787522.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T14-18-29.787522.parquet", 
"**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T14-18-29.787522.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T14-18-29.787522.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T14-18-29.787522.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T14-18-29.787522.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T14-18-29.787522.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T14-18-29.787522.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T14-18-29.787522.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-12-16T14-18-29.787522.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T14-18-29.787522.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-12-16T14-18-29.787522.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T14-18-29.787522.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T14-18-29.787522.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T14-18-29.787522.parquet", "**/details_harness|hendrycksTest-management|5_2023-12-16T14-18-29.787522.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-12-16T14-18-29.787522.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T14-18-29.787522.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T14-18-29.787522.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T14-18-29.787522.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T14-18-29.787522.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-12-16T14-18-29.787522.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-12-16T14-18-29.787522.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-12-16T14-18-29.787522.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T14-18-29.787522.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-12-16T14-18-29.787522.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T14-18-29.787522.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T14-18-29.787522.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-12-16T14-18-29.787522.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-12-16T14-18-29.787522.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-12-16T14-18-29.787522.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T14-18-29.787522.parquet", "**/details_harness|hendrycksTest-virology|5_2023-12-16T14-18-29.787522.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-12-16T14-18-29.787522.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T14-18-29.787522.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-12-16T14-18-29.787522.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-12-16T14-18-29.787522.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T14-18-29.787522.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T14-18-29.787522.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-12-16T14-18-29.787522.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T14-18-29.787522.parquet", 
"**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T14-18-29.787522.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T14-18-29.787522.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T14-18-29.787522.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-12-16T14-18-29.787522.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-12-16T14-18-29.787522.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T14-18-29.787522.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-12-16T14-18-29.787522.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T14-18-29.787522.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T14-18-29.787522.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T14-18-29.787522.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-12-16T14-18-29.787522.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T14-18-29.787522.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T14-18-29.787522.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T14-18-29.787522.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T14-18-29.787522.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T14-18-29.787522.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T14-18-29.787522.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T14-18-29.787522.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T14-18-29.787522.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T14-18-29.787522.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T14-18-29.787522.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T14-18-29.787522.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T14-18-29.787522.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T14-18-29.787522.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T14-18-29.787522.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-12-16T14-18-29.787522.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T14-18-29.787522.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-12-16T14-18-29.787522.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T14-18-29.787522.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T14-18-29.787522.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T14-18-29.787522.parquet", "**/details_harness|hendrycksTest-management|5_2023-12-16T14-18-29.787522.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-12-16T14-18-29.787522.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T14-18-29.787522.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T14-18-29.787522.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T14-18-29.787522.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T14-18-29.787522.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-12-16T14-18-29.787522.parquet", 
"**/details_harness|hendrycksTest-philosophy|5_2023-12-16T14-18-29.787522.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-12-16T14-18-29.787522.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T14-18-29.787522.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-12-16T14-18-29.787522.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T14-18-29.787522.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T14-18-29.787522.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-12-16T14-18-29.787522.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-12-16T14-18-29.787522.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-12-16T14-18-29.787522.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T14-18-29.787522.parquet", "**/details_harness|hendrycksTest-virology|5_2023-12-16T14-18-29.787522.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-12-16T14-18-29.787522.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_12_16T14_18_29.787522", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T14-18-29.787522.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T14-18-29.787522.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_12_16T14_18_29.787522", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-12-16T14-18-29.787522.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-12-16T14-18-29.787522.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_12_16T14_18_29.787522", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-12-16T14-18-29.787522.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-12-16T14-18-29.787522.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_12_16T14_18_29.787522", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T14-18-29.787522.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T14-18-29.787522.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_12_16T14_18_29.787522", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T14-18-29.787522.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T14-18-29.787522.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_12_16T14_18_29.787522", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-12-16T14-18-29.787522.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-12-16T14-18-29.787522.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_12_16T14_18_29.787522", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T14-18-29.787522.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T14-18-29.787522.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_12_16T14_18_29.787522", "path": 
["**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T14-18-29.787522.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T14-18-29.787522.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_12_16T14_18_29.787522", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T14-18-29.787522.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T14-18-29.787522.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_12_16T14_18_29.787522", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T14-18-29.787522.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T14-18-29.787522.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_12_16T14_18_29.787522", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-12-16T14-18-29.787522.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-12-16T14-18-29.787522.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_12_16T14_18_29.787522", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-12-16T14-18-29.787522.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-12-16T14-18-29.787522.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_12_16T14_18_29.787522", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T14-18-29.787522.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T14-18-29.787522.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_12_16T14_18_29.787522", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-12-16T14-18-29.787522.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-12-16T14-18-29.787522.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_12_16T14_18_29.787522", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T14-18-29.787522.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T14-18-29.787522.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_12_16T14_18_29.787522", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T14-18-29.787522.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T14-18-29.787522.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_12_16T14_18_29.787522", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T14-18-29.787522.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T14-18-29.787522.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_12_16T14_18_29.787522", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-12-16T14-18-29.787522.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-global_facts|5_2023-12-16T14-18-29.787522.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_12_16T14_18_29.787522", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T14-18-29.787522.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T14-18-29.787522.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_12_16T14_18_29.787522", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T14-18-29.787522.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T14-18-29.787522.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_12_16T14_18_29.787522", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T14-18-29.787522.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T14-18-29.787522.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_12_16T14_18_29.787522", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T14-18-29.787522.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T14-18-29.787522.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_12_16T14_18_29.787522", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T14-18-29.787522.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T14-18-29.787522.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_12_16T14_18_29.787522", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T14-18-29.787522.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T14-18-29.787522.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_12_16T14_18_29.787522", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T14-18-29.787522.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T14-18-29.787522.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_12_16T14_18_29.787522", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T14-18-29.787522.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T14-18-29.787522.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_12_16T14_18_29.787522", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T14-18-29.787522.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T14-18-29.787522.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_12_16T14_18_29.787522", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T14-18-29.787522.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T14-18-29.787522.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_12_16T14_18_29.787522", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T14-18-29.787522.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T14-18-29.787522.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_12_16T14_18_29.787522", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T14-18-29.787522.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T14-18-29.787522.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_12_16T14_18_29.787522", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T14-18-29.787522.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T14-18-29.787522.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_12_16T14_18_29.787522", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T14-18-29.787522.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T14-18-29.787522.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_12_16T14_18_29.787522", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-12-16T14-18-29.787522.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-12-16T14-18-29.787522.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_12_16T14_18_29.787522", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T14-18-29.787522.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T14-18-29.787522.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_12_16T14_18_29.787522", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-12-16T14-18-29.787522.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-12-16T14-18-29.787522.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_12_16T14_18_29.787522", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T14-18-29.787522.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T14-18-29.787522.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_12_16T14_18_29.787522", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T14-18-29.787522.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T14-18-29.787522.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_12_16T14_18_29.787522", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T14-18-29.787522.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T14-18-29.787522.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_12_16T14_18_29.787522", "path": ["**/details_harness|hendrycksTest-management|5_2023-12-16T14-18-29.787522.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-12-16T14-18-29.787522.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_12_16T14_18_29.787522", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-12-16T14-18-29.787522.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-12-16T14-18-29.787522.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_12_16T14_18_29.787522", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T14-18-29.787522.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T14-18-29.787522.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_12_16T14_18_29.787522", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T14-18-29.787522.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T14-18-29.787522.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_12_16T14_18_29.787522", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T14-18-29.787522.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T14-18-29.787522.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_12_16T14_18_29.787522", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T14-18-29.787522.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T14-18-29.787522.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_12_16T14_18_29.787522", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-12-16T14-18-29.787522.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-12-16T14-18-29.787522.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_12_16T14_18_29.787522", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-12-16T14-18-29.787522.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-12-16T14-18-29.787522.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_12_16T14_18_29.787522", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-12-16T14-18-29.787522.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-12-16T14-18-29.787522.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_12_16T14_18_29.787522", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T14-18-29.787522.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T14-18-29.787522.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_12_16T14_18_29.787522", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2023-12-16T14-18-29.787522.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-12-16T14-18-29.787522.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_12_16T14_18_29.787522", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T14-18-29.787522.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T14-18-29.787522.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_12_16T14_18_29.787522", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T14-18-29.787522.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T14-18-29.787522.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_12_16T14_18_29.787522", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-12-16T14-18-29.787522.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-12-16T14-18-29.787522.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_12_16T14_18_29.787522", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-12-16T14-18-29.787522.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-12-16T14-18-29.787522.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_12_16T14_18_29.787522", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-12-16T14-18-29.787522.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-12-16T14-18-29.787522.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_12_16T14_18_29.787522", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T14-18-29.787522.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T14-18-29.787522.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_12_16T14_18_29.787522", "path": ["**/details_harness|hendrycksTest-virology|5_2023-12-16T14-18-29.787522.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-12-16T14-18-29.787522.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_12_16T14_18_29.787522", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-12-16T14-18-29.787522.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-12-16T14-18-29.787522.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_12_16T14_18_29.787522", "path": ["**/details_harness|truthfulqa:mc|0_2023-12-16T14-18-29.787522.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-12-16T14-18-29.787522.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_12_16T14_18_29.787522", "path": ["**/details_harness|winogrande|5_2023-12-16T14-18-29.787522.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-12-16T14-18-29.787522.parquet"]}]}, {"config_name": "results", "data_files": [{"split": 
"2023_12_16T14_18_29.787522", "path": ["results_2023-12-16T14-18-29.787522.parquet"]}, {"split": "latest", "path": ["results_2023-12-16T14-18-29.787522.parquet"]}]}]}
2023-12-16T14:22:07+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of luffycodes/llama-class-shishya-7b-ep3 Dataset automatically created during the evaluation run of model luffycodes/llama-class-shishya-7b-ep3 on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-12-16T14:18:29.787522 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
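For reference, the loading snippet that the card text above refers to (as given in this record's metadata) is:

```python
from datasets import load_dataset

# Per-example details for the 5-shot Winogrande task; the "train" split
# points to the latest results of this run.
data = load_dataset(
    "open-llm-leaderboard/details_luffycodes__llama-class-shishya-7b-ep3",
    "harness_winogrande_5",
    split="train",
)
```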
[ "# Dataset Card for Evaluation run of luffycodes/llama-class-shishya-7b-ep3\n\n\n\nDataset automatically created during the evaluation run of model luffycodes/llama-class-shishya-7b-ep3 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-12-16T14:18:29.787522(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of luffycodes/llama-class-shishya-7b-ep3\n\n\n\nDataset automatically created during the evaluation run of model luffycodes/llama-class-shishya-7b-ep3 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-12-16T14:18:29.787522(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 195, 67, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of luffycodes/llama-class-shishya-7b-ep3\n\n\n\nDataset automatically created during the evaluation run of model luffycodes/llama-class-shishya-7b-ep3 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-12-16T14:18:29.787522(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]" ]
3bc794bb1a628d3e63afe40252d6a806240ec96f
# Dataset Card for Evaluation run of fblgit/una-cybertron-7b-v3-OMA <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [fblgit/una-cybertron-7b-v3-OMA](https://huggingface.co/fblgit/una-cybertron-7b-v3-OMA) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_fblgit__una-cybertron-7b-v3-OMA", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-12-16T14:22:11.823260](https://huggingface.co/datasets/open-llm-leaderboard/details_fblgit__una-cybertron-7b-v3-OMA/blob/main/results_2023-12-16T14-22-11.823260.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.6407887213707157, "acc_stderr": 0.032306194957506966, "acc_norm": 0.6401991329877219, "acc_norm_stderr": 0.03297436021123899, "mc1": 0.5801713586291309, "mc1_stderr": 0.017277030301775766, "mc2": 0.6984571807866093, "mc2_stderr": 0.01516400593831668 }, "harness|arc:challenge|25": { "acc": 0.7098976109215017, "acc_stderr": 0.013261573677520766, "acc_norm": 0.7303754266211604, "acc_norm_stderr": 0.012968040686869155 }, "harness|hellaswag|10": { "acc": 0.7183827922724557, "acc_stderr": 0.0044886843979795015, "acc_norm": 0.8794064927305317, "acc_norm_stderr": 0.0032498873947065044 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.35, "acc_stderr": 0.047937248544110196, "acc_norm": 0.35, "acc_norm_stderr": 0.047937248544110196 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.6148148148148148, "acc_stderr": 0.04203921040156279, "acc_norm": 0.6148148148148148, "acc_norm_stderr": 0.04203921040156279 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.6842105263157895, "acc_stderr": 0.0378272898086547, "acc_norm": 0.6842105263157895, "acc_norm_stderr": 0.0378272898086547 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.59, "acc_stderr": 0.049431107042371025, "acc_norm": 0.59, "acc_norm_stderr": 0.049431107042371025 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.6943396226415094, "acc_stderr": 0.028353298073322666, "acc_norm": 0.6943396226415094, "acc_norm_stderr": 0.028353298073322666 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.7291666666666666, "acc_stderr": 0.03716177437566017, "acc_norm": 0.7291666666666666, "acc_norm_stderr": 0.03716177437566017 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.47, "acc_stderr": 0.050161355804659205, "acc_norm": 0.47, "acc_norm_stderr": 0.050161355804659205 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.53, "acc_stderr": 0.050161355804659205, "acc_norm": 0.53, "acc_norm_stderr": 0.050161355804659205 }, 
"harness|hendrycksTest-college_mathematics|5": { "acc": 0.28, "acc_stderr": 0.04512608598542128, "acc_norm": 0.28, "acc_norm_stderr": 0.04512608598542128 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.6473988439306358, "acc_stderr": 0.03643037168958548, "acc_norm": 0.6473988439306358, "acc_norm_stderr": 0.03643037168958548 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.37254901960784315, "acc_stderr": 0.048108401480826346, "acc_norm": 0.37254901960784315, "acc_norm_stderr": 0.048108401480826346 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.76, "acc_stderr": 0.042923469599092816, "acc_norm": 0.76, "acc_norm_stderr": 0.042923469599092816 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.5574468085106383, "acc_stderr": 0.03246956919789958, "acc_norm": 0.5574468085106383, "acc_norm_stderr": 0.03246956919789958 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.49122807017543857, "acc_stderr": 0.04702880432049615, "acc_norm": 0.49122807017543857, "acc_norm_stderr": 0.04702880432049615 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5517241379310345, "acc_stderr": 0.04144311810878152, "acc_norm": 0.5517241379310345, "acc_norm_stderr": 0.04144311810878152 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.3968253968253968, "acc_stderr": 0.02519710107424649, "acc_norm": 0.3968253968253968, "acc_norm_stderr": 0.02519710107424649 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.4523809523809524, "acc_stderr": 0.04451807959055328, "acc_norm": 0.4523809523809524, "acc_norm_stderr": 0.04451807959055328 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.33, "acc_stderr": 0.04725815626252604, "acc_norm": 0.33, "acc_norm_stderr": 0.04725815626252604 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.8, "acc_stderr": 0.022755204959542943, "acc_norm": 0.8, "acc_norm_stderr": 0.022755204959542943 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.46798029556650245, "acc_stderr": 0.035107665979592154, "acc_norm": 0.46798029556650245, "acc_norm_stderr": 0.035107665979592154 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.69, "acc_stderr": 0.04648231987117316, "acc_norm": 0.69, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.7575757575757576, "acc_stderr": 0.03346409881055953, "acc_norm": 0.7575757575757576, "acc_norm_stderr": 0.03346409881055953 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.7777777777777778, "acc_stderr": 0.02962022787479047, "acc_norm": 0.7777777777777778, "acc_norm_stderr": 0.02962022787479047 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.8911917098445595, "acc_stderr": 0.022473253332768766, "acc_norm": 0.8911917098445595, "acc_norm_stderr": 0.022473253332768766 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.6410256410256411, "acc_stderr": 0.024321738484602354, "acc_norm": 0.6410256410256411, "acc_norm_stderr": 0.024321738484602354 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.31851851851851853, "acc_stderr": 0.02840653309060846, "acc_norm": 0.31851851851851853, "acc_norm_stderr": 0.02840653309060846 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.634453781512605, "acc_stderr": 0.031282177063684614, "acc_norm": 0.634453781512605, "acc_norm_stderr": 0.031282177063684614 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.2980132450331126, "acc_stderr": 0.037345356767871984, "acc_norm": 
0.2980132450331126, "acc_norm_stderr": 0.037345356767871984 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8366972477064221, "acc_stderr": 0.01584825580650155, "acc_norm": 0.8366972477064221, "acc_norm_stderr": 0.01584825580650155 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.5046296296296297, "acc_stderr": 0.03409825519163572, "acc_norm": 0.5046296296296297, "acc_norm_stderr": 0.03409825519163572 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.8137254901960784, "acc_stderr": 0.027325470966716312, "acc_norm": 0.8137254901960784, "acc_norm_stderr": 0.027325470966716312 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.7890295358649789, "acc_stderr": 0.02655837250266192, "acc_norm": 0.7890295358649789, "acc_norm_stderr": 0.02655837250266192 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.695067264573991, "acc_stderr": 0.030898610882477515, "acc_norm": 0.695067264573991, "acc_norm_stderr": 0.030898610882477515 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.7557251908396947, "acc_stderr": 0.037683359597287434, "acc_norm": 0.7557251908396947, "acc_norm_stderr": 0.037683359597287434 }, "harness|hendrycksTest-international_law|5": { "acc": 0.7520661157024794, "acc_stderr": 0.03941897526516301, "acc_norm": 0.7520661157024794, "acc_norm_stderr": 0.03941897526516301 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.7685185185185185, "acc_stderr": 0.04077494709252627, "acc_norm": 0.7685185185185185, "acc_norm_stderr": 0.04077494709252627 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.7423312883435583, "acc_stderr": 0.03436150827846917, "acc_norm": 0.7423312883435583, "acc_norm_stderr": 0.03436150827846917 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.4642857142857143, "acc_stderr": 0.04733667890053756, "acc_norm": 0.4642857142857143, "acc_norm_stderr": 0.04733667890053756 }, "harness|hendrycksTest-management|5": { "acc": 0.7766990291262136, "acc_stderr": 0.04123553189891431, "acc_norm": 0.7766990291262136, "acc_norm_stderr": 0.04123553189891431 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8803418803418803, "acc_stderr": 0.021262719400406957, "acc_norm": 0.8803418803418803, "acc_norm_stderr": 0.021262719400406957 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.68, "acc_stderr": 0.046882617226215034, "acc_norm": 0.68, "acc_norm_stderr": 0.046882617226215034 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.8275862068965517, "acc_stderr": 0.013507943909371802, "acc_norm": 0.8275862068965517, "acc_norm_stderr": 0.013507943909371802 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.7196531791907514, "acc_stderr": 0.024182427496577612, "acc_norm": 0.7196531791907514, "acc_norm_stderr": 0.024182427496577612 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.45251396648044695, "acc_stderr": 0.016646914804438775, "acc_norm": 0.45251396648044695, "acc_norm_stderr": 0.016646914804438775 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.6993464052287581, "acc_stderr": 0.02625605383571896, "acc_norm": 0.6993464052287581, "acc_norm_stderr": 0.02625605383571896 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.7041800643086816, "acc_stderr": 0.025922371788818763, "acc_norm": 0.7041800643086816, "acc_norm_stderr": 0.025922371788818763 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.7469135802469136, "acc_stderr": 0.024191808600712995, "acc_norm": 0.7469135802469136, "acc_norm_stderr": 0.024191808600712995 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 
0.4645390070921986, "acc_stderr": 0.029752389657427047, "acc_norm": 0.4645390070921986, "acc_norm_stderr": 0.029752389657427047 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.46479791395045633, "acc_stderr": 0.012738547371303957, "acc_norm": 0.46479791395045633, "acc_norm_stderr": 0.012738547371303957 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.6654411764705882, "acc_stderr": 0.0286619962023353, "acc_norm": 0.6654411764705882, "acc_norm_stderr": 0.0286619962023353 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.6584967320261438, "acc_stderr": 0.019184639328092487, "acc_norm": 0.6584967320261438, "acc_norm_stderr": 0.019184639328092487 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6727272727272727, "acc_stderr": 0.0449429086625209, "acc_norm": 0.6727272727272727, "acc_norm_stderr": 0.0449429086625209 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.7306122448979592, "acc_stderr": 0.02840125202902294, "acc_norm": 0.7306122448979592, "acc_norm_stderr": 0.02840125202902294 }, "harness|hendrycksTest-sociology|5": { "acc": 0.8507462686567164, "acc_stderr": 0.025196929874827075, "acc_norm": 0.8507462686567164, "acc_norm_stderr": 0.025196929874827075 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.87, "acc_stderr": 0.03379976689896309, "acc_norm": 0.87, "acc_norm_stderr": 0.03379976689896309 }, "harness|hendrycksTest-virology|5": { "acc": 0.5662650602409639, "acc_stderr": 0.03858158940685517, "acc_norm": 0.5662650602409639, "acc_norm_stderr": 0.03858158940685517 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8304093567251462, "acc_stderr": 0.02878210810540171, "acc_norm": 0.8304093567251462, "acc_norm_stderr": 0.02878210810540171 }, "harness|truthfulqa:mc|0": { "mc1": 0.5801713586291309, "mc1_stderr": 0.017277030301775766, "mc2": 0.6984571807866093, "mc2_stderr": 0.01516400593831668 }, "harness|winogrande|5": { "acc": 0.8208366219415943, "acc_stderr": 0.010777949156047987 }, "harness|gsm8k|5": { "acc": 0.6770280515542078, "acc_stderr": 0.012880360794851815 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). 
--> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
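In addition to the per-task detail configurations, the aggregated metrics shown in the "Latest results" section above can be pulled directly from the "results" configuration. The following is a minimal sketch, assuming the `results` configuration and the `latest` split described earlier on this card; the exact field names inside each row may differ from the JSON excerpt above.

```python
from datasets import load_dataset

# Load the aggregated metrics for the most recent evaluation run.
# "results" is the aggregated configuration and "latest" the split that
# always points to the newest run, as described on this card.
results = load_dataset(
    "open-llm-leaderboard/details_fblgit__una-cybertron-7b-v3-OMA",
    "results",
    split="latest",
)

# Inspect the single aggregated-results row.
print(results[0])
```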
open-llm-leaderboard/details_fblgit__una-cybertron-7b-v3-OMA
[ "region:us" ]
2023-12-16T14:25:02+00:00
{"pretty_name": "Evaluation run of fblgit/una-cybertron-7b-v3-OMA", "dataset_summary": "Dataset automatically created during the evaluation run of model [fblgit/una-cybertron-7b-v3-OMA](https://huggingface.co/fblgit/una-cybertron-7b-v3-OMA) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_fblgit__una-cybertron-7b-v3-OMA\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-12-16T14:22:11.823260](https://huggingface.co/datasets/open-llm-leaderboard/details_fblgit__una-cybertron-7b-v3-OMA/blob/main/results_2023-12-16T14-22-11.823260.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6407887213707157,\n \"acc_stderr\": 0.032306194957506966,\n \"acc_norm\": 0.6401991329877219,\n \"acc_norm_stderr\": 0.03297436021123899,\n \"mc1\": 0.5801713586291309,\n \"mc1_stderr\": 0.017277030301775766,\n \"mc2\": 0.6984571807866093,\n \"mc2_stderr\": 0.01516400593831668\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.7098976109215017,\n \"acc_stderr\": 0.013261573677520766,\n \"acc_norm\": 0.7303754266211604,\n \"acc_norm_stderr\": 0.012968040686869155\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.7183827922724557,\n \"acc_stderr\": 0.0044886843979795015,\n \"acc_norm\": 0.8794064927305317,\n \"acc_norm_stderr\": 0.0032498873947065044\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.35,\n \"acc_stderr\": 0.047937248544110196,\n \"acc_norm\": 0.35,\n \"acc_norm_stderr\": 0.047937248544110196\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6148148148148148,\n \"acc_stderr\": 0.04203921040156279,\n \"acc_norm\": 0.6148148148148148,\n \"acc_norm_stderr\": 0.04203921040156279\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.6842105263157895,\n \"acc_stderr\": 0.0378272898086547,\n \"acc_norm\": 0.6842105263157895,\n \"acc_norm_stderr\": 0.0378272898086547\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.59,\n \"acc_stderr\": 0.049431107042371025,\n \"acc_norm\": 0.59,\n \"acc_norm_stderr\": 0.049431107042371025\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.6943396226415094,\n \"acc_stderr\": 0.028353298073322666,\n \"acc_norm\": 0.6943396226415094,\n \"acc_norm_stderr\": 0.028353298073322666\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7291666666666666,\n \"acc_stderr\": 0.03716177437566017,\n \"acc_norm\": 0.7291666666666666,\n \"acc_norm_stderr\": 0.03716177437566017\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.47,\n \"acc_stderr\": 0.050161355804659205,\n 
\"acc_norm\": 0.47,\n \"acc_norm_stderr\": 0.050161355804659205\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.53,\n \"acc_stderr\": 0.050161355804659205,\n \"acc_norm\": 0.53,\n \"acc_norm_stderr\": 0.050161355804659205\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.28,\n \"acc_stderr\": 0.04512608598542128,\n \"acc_norm\": 0.28,\n \"acc_norm_stderr\": 0.04512608598542128\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6473988439306358,\n \"acc_stderr\": 0.03643037168958548,\n \"acc_norm\": 0.6473988439306358,\n \"acc_norm_stderr\": 0.03643037168958548\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.37254901960784315,\n \"acc_stderr\": 0.048108401480826346,\n \"acc_norm\": 0.37254901960784315,\n \"acc_norm_stderr\": 0.048108401480826346\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.76,\n \"acc_stderr\": 0.042923469599092816,\n \"acc_norm\": 0.76,\n \"acc_norm_stderr\": 0.042923469599092816\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.5574468085106383,\n \"acc_stderr\": 0.03246956919789958,\n \"acc_norm\": 0.5574468085106383,\n \"acc_norm_stderr\": 0.03246956919789958\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.49122807017543857,\n \"acc_stderr\": 0.04702880432049615,\n \"acc_norm\": 0.49122807017543857,\n \"acc_norm_stderr\": 0.04702880432049615\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5517241379310345,\n \"acc_stderr\": 0.04144311810878152,\n \"acc_norm\": 0.5517241379310345,\n \"acc_norm_stderr\": 0.04144311810878152\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.3968253968253968,\n \"acc_stderr\": 0.02519710107424649,\n \"acc_norm\": 0.3968253968253968,\n \"acc_norm_stderr\": 0.02519710107424649\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.4523809523809524,\n \"acc_stderr\": 0.04451807959055328,\n \"acc_norm\": 0.4523809523809524,\n \"acc_norm_stderr\": 0.04451807959055328\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.33,\n \"acc_stderr\": 0.04725815626252604,\n \"acc_norm\": 0.33,\n \"acc_norm_stderr\": 0.04725815626252604\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.8,\n \"acc_stderr\": 0.022755204959542943,\n \"acc_norm\": 0.8,\n \"acc_norm_stderr\": 0.022755204959542943\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.46798029556650245,\n \"acc_stderr\": 0.035107665979592154,\n \"acc_norm\": 0.46798029556650245,\n \"acc_norm_stderr\": 0.035107665979592154\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.69,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.69,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7575757575757576,\n \"acc_stderr\": 0.03346409881055953,\n \"acc_norm\": 0.7575757575757576,\n \"acc_norm_stderr\": 0.03346409881055953\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.7777777777777778,\n \"acc_stderr\": 0.02962022787479047,\n \"acc_norm\": 0.7777777777777778,\n \"acc_norm_stderr\": 0.02962022787479047\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.8911917098445595,\n \"acc_stderr\": 0.022473253332768766,\n \"acc_norm\": 0.8911917098445595,\n \"acc_norm_stderr\": 0.022473253332768766\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.6410256410256411,\n 
\"acc_stderr\": 0.024321738484602354,\n \"acc_norm\": 0.6410256410256411,\n \"acc_norm_stderr\": 0.024321738484602354\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.31851851851851853,\n \"acc_stderr\": 0.02840653309060846,\n \"acc_norm\": 0.31851851851851853,\n \"acc_norm_stderr\": 0.02840653309060846\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.634453781512605,\n \"acc_stderr\": 0.031282177063684614,\n \"acc_norm\": 0.634453781512605,\n \"acc_norm_stderr\": 0.031282177063684614\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.2980132450331126,\n \"acc_stderr\": 0.037345356767871984,\n \"acc_norm\": 0.2980132450331126,\n \"acc_norm_stderr\": 0.037345356767871984\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8366972477064221,\n \"acc_stderr\": 0.01584825580650155,\n \"acc_norm\": 0.8366972477064221,\n \"acc_norm_stderr\": 0.01584825580650155\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.5046296296296297,\n \"acc_stderr\": 0.03409825519163572,\n \"acc_norm\": 0.5046296296296297,\n \"acc_norm_stderr\": 0.03409825519163572\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.8137254901960784,\n \"acc_stderr\": 0.027325470966716312,\n \"acc_norm\": 0.8137254901960784,\n \"acc_norm_stderr\": 0.027325470966716312\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.7890295358649789,\n \"acc_stderr\": 0.02655837250266192,\n \"acc_norm\": 0.7890295358649789,\n \"acc_norm_stderr\": 0.02655837250266192\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.695067264573991,\n \"acc_stderr\": 0.030898610882477515,\n \"acc_norm\": 0.695067264573991,\n \"acc_norm_stderr\": 0.030898610882477515\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.7557251908396947,\n \"acc_stderr\": 0.037683359597287434,\n \"acc_norm\": 0.7557251908396947,\n \"acc_norm_stderr\": 0.037683359597287434\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.7520661157024794,\n \"acc_stderr\": 0.03941897526516301,\n \"acc_norm\": 0.7520661157024794,\n \"acc_norm_stderr\": 0.03941897526516301\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7685185185185185,\n \"acc_stderr\": 0.04077494709252627,\n \"acc_norm\": 0.7685185185185185,\n \"acc_norm_stderr\": 0.04077494709252627\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7423312883435583,\n \"acc_stderr\": 0.03436150827846917,\n \"acc_norm\": 0.7423312883435583,\n \"acc_norm_stderr\": 0.03436150827846917\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.4642857142857143,\n \"acc_stderr\": 0.04733667890053756,\n \"acc_norm\": 0.4642857142857143,\n \"acc_norm_stderr\": 0.04733667890053756\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7766990291262136,\n \"acc_stderr\": 0.04123553189891431,\n \"acc_norm\": 0.7766990291262136,\n \"acc_norm_stderr\": 0.04123553189891431\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8803418803418803,\n \"acc_stderr\": 0.021262719400406957,\n \"acc_norm\": 0.8803418803418803,\n \"acc_norm_stderr\": 0.021262719400406957\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.68,\n \"acc_stderr\": 0.046882617226215034,\n \"acc_norm\": 0.68,\n \"acc_norm_stderr\": 0.046882617226215034\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.8275862068965517,\n \"acc_stderr\": 0.013507943909371802,\n \"acc_norm\": 0.8275862068965517,\n 
\"acc_norm_stderr\": 0.013507943909371802\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.7196531791907514,\n \"acc_stderr\": 0.024182427496577612,\n \"acc_norm\": 0.7196531791907514,\n \"acc_norm_stderr\": 0.024182427496577612\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.45251396648044695,\n \"acc_stderr\": 0.016646914804438775,\n \"acc_norm\": 0.45251396648044695,\n \"acc_norm_stderr\": 0.016646914804438775\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.6993464052287581,\n \"acc_stderr\": 0.02625605383571896,\n \"acc_norm\": 0.6993464052287581,\n \"acc_norm_stderr\": 0.02625605383571896\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.7041800643086816,\n \"acc_stderr\": 0.025922371788818763,\n \"acc_norm\": 0.7041800643086816,\n \"acc_norm_stderr\": 0.025922371788818763\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.7469135802469136,\n \"acc_stderr\": 0.024191808600712995,\n \"acc_norm\": 0.7469135802469136,\n \"acc_norm_stderr\": 0.024191808600712995\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.4645390070921986,\n \"acc_stderr\": 0.029752389657427047,\n \"acc_norm\": 0.4645390070921986,\n \"acc_norm_stderr\": 0.029752389657427047\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.46479791395045633,\n \"acc_stderr\": 0.012738547371303957,\n \"acc_norm\": 0.46479791395045633,\n \"acc_norm_stderr\": 0.012738547371303957\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.6654411764705882,\n \"acc_stderr\": 0.0286619962023353,\n \"acc_norm\": 0.6654411764705882,\n \"acc_norm_stderr\": 0.0286619962023353\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6584967320261438,\n \"acc_stderr\": 0.019184639328092487,\n \"acc_norm\": 0.6584967320261438,\n \"acc_norm_stderr\": 0.019184639328092487\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6727272727272727,\n \"acc_stderr\": 0.0449429086625209,\n \"acc_norm\": 0.6727272727272727,\n \"acc_norm_stderr\": 0.0449429086625209\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.7306122448979592,\n \"acc_stderr\": 0.02840125202902294,\n \"acc_norm\": 0.7306122448979592,\n \"acc_norm_stderr\": 0.02840125202902294\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8507462686567164,\n \"acc_stderr\": 0.025196929874827075,\n \"acc_norm\": 0.8507462686567164,\n \"acc_norm_stderr\": 0.025196929874827075\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.87,\n \"acc_stderr\": 0.03379976689896309,\n \"acc_norm\": 0.87,\n \"acc_norm_stderr\": 0.03379976689896309\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5662650602409639,\n \"acc_stderr\": 0.03858158940685517,\n \"acc_norm\": 0.5662650602409639,\n \"acc_norm_stderr\": 0.03858158940685517\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8304093567251462,\n \"acc_stderr\": 0.02878210810540171,\n \"acc_norm\": 0.8304093567251462,\n \"acc_norm_stderr\": 0.02878210810540171\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.5801713586291309,\n \"mc1_stderr\": 0.017277030301775766,\n \"mc2\": 0.6984571807866093,\n \"mc2_stderr\": 0.01516400593831668\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.8208366219415943,\n \"acc_stderr\": 0.010777949156047987\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.6770280515542078,\n \"acc_stderr\": 0.012880360794851815\n }\n}\n```", "repo_url": "https://huggingface.co/fblgit/una-cybertron-7b-v3-OMA", 
"leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_12_16T14_22_11.823260", "path": ["**/details_harness|arc:challenge|25_2023-12-16T14-22-11.823260.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-12-16T14-22-11.823260.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_12_16T14_22_11.823260", "path": ["**/details_harness|gsm8k|5_2023-12-16T14-22-11.823260.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-12-16T14-22-11.823260.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_12_16T14_22_11.823260", "path": ["**/details_harness|hellaswag|10_2023-12-16T14-22-11.823260.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-12-16T14-22-11.823260.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_12_16T14_22_11.823260", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T14-22-11.823260.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-12-16T14-22-11.823260.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-12-16T14-22-11.823260.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T14-22-11.823260.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T14-22-11.823260.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-12-16T14-22-11.823260.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T14-22-11.823260.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T14-22-11.823260.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T14-22-11.823260.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T14-22-11.823260.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-12-16T14-22-11.823260.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-12-16T14-22-11.823260.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T14-22-11.823260.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-12-16T14-22-11.823260.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T14-22-11.823260.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T14-22-11.823260.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T14-22-11.823260.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-12-16T14-22-11.823260.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T14-22-11.823260.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T14-22-11.823260.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T14-22-11.823260.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T14-22-11.823260.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T14-22-11.823260.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T14-22-11.823260.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T14-22-11.823260.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T14-22-11.823260.parquet", 
"**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T14-22-11.823260.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T14-22-11.823260.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T14-22-11.823260.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T14-22-11.823260.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T14-22-11.823260.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T14-22-11.823260.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-12-16T14-22-11.823260.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T14-22-11.823260.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-12-16T14-22-11.823260.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T14-22-11.823260.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T14-22-11.823260.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T14-22-11.823260.parquet", "**/details_harness|hendrycksTest-management|5_2023-12-16T14-22-11.823260.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-12-16T14-22-11.823260.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T14-22-11.823260.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T14-22-11.823260.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T14-22-11.823260.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T14-22-11.823260.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-12-16T14-22-11.823260.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-12-16T14-22-11.823260.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-12-16T14-22-11.823260.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T14-22-11.823260.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-12-16T14-22-11.823260.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T14-22-11.823260.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T14-22-11.823260.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-12-16T14-22-11.823260.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-12-16T14-22-11.823260.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-12-16T14-22-11.823260.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T14-22-11.823260.parquet", "**/details_harness|hendrycksTest-virology|5_2023-12-16T14-22-11.823260.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-12-16T14-22-11.823260.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T14-22-11.823260.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-12-16T14-22-11.823260.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-12-16T14-22-11.823260.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T14-22-11.823260.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T14-22-11.823260.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-12-16T14-22-11.823260.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T14-22-11.823260.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T14-22-11.823260.parquet", 
"**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T14-22-11.823260.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T14-22-11.823260.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-12-16T14-22-11.823260.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-12-16T14-22-11.823260.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T14-22-11.823260.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-12-16T14-22-11.823260.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T14-22-11.823260.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T14-22-11.823260.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T14-22-11.823260.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-12-16T14-22-11.823260.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T14-22-11.823260.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T14-22-11.823260.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T14-22-11.823260.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T14-22-11.823260.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T14-22-11.823260.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T14-22-11.823260.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T14-22-11.823260.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T14-22-11.823260.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T14-22-11.823260.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T14-22-11.823260.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T14-22-11.823260.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T14-22-11.823260.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T14-22-11.823260.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T14-22-11.823260.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-12-16T14-22-11.823260.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T14-22-11.823260.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-12-16T14-22-11.823260.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T14-22-11.823260.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T14-22-11.823260.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T14-22-11.823260.parquet", "**/details_harness|hendrycksTest-management|5_2023-12-16T14-22-11.823260.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-12-16T14-22-11.823260.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T14-22-11.823260.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T14-22-11.823260.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T14-22-11.823260.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T14-22-11.823260.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-12-16T14-22-11.823260.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-12-16T14-22-11.823260.parquet", 
"**/details_harness|hendrycksTest-prehistory|5_2023-12-16T14-22-11.823260.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T14-22-11.823260.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-12-16T14-22-11.823260.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T14-22-11.823260.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T14-22-11.823260.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-12-16T14-22-11.823260.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-12-16T14-22-11.823260.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-12-16T14-22-11.823260.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T14-22-11.823260.parquet", "**/details_harness|hendrycksTest-virology|5_2023-12-16T14-22-11.823260.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-12-16T14-22-11.823260.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_12_16T14_22_11.823260", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T14-22-11.823260.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T14-22-11.823260.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_12_16T14_22_11.823260", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-12-16T14-22-11.823260.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-12-16T14-22-11.823260.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_12_16T14_22_11.823260", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-12-16T14-22-11.823260.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-12-16T14-22-11.823260.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_12_16T14_22_11.823260", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T14-22-11.823260.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T14-22-11.823260.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_12_16T14_22_11.823260", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T14-22-11.823260.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T14-22-11.823260.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_12_16T14_22_11.823260", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-12-16T14-22-11.823260.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-12-16T14-22-11.823260.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_12_16T14_22_11.823260", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T14-22-11.823260.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T14-22-11.823260.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_12_16T14_22_11.823260", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T14-22-11.823260.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T14-22-11.823260.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_12_16T14_22_11.823260", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T14-22-11.823260.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T14-22-11.823260.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_12_16T14_22_11.823260", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T14-22-11.823260.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T14-22-11.823260.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_12_16T14_22_11.823260", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-12-16T14-22-11.823260.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-12-16T14-22-11.823260.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_12_16T14_22_11.823260", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-12-16T14-22-11.823260.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-12-16T14-22-11.823260.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_12_16T14_22_11.823260", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T14-22-11.823260.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T14-22-11.823260.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_12_16T14_22_11.823260", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-12-16T14-22-11.823260.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-12-16T14-22-11.823260.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_12_16T14_22_11.823260", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T14-22-11.823260.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T14-22-11.823260.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_12_16T14_22_11.823260", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T14-22-11.823260.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T14-22-11.823260.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_12_16T14_22_11.823260", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T14-22-11.823260.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T14-22-11.823260.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_12_16T14_22_11.823260", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-12-16T14-22-11.823260.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-12-16T14-22-11.823260.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_12_16T14_22_11.823260", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T14-22-11.823260.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T14-22-11.823260.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_12_16T14_22_11.823260", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T14-22-11.823260.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T14-22-11.823260.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_12_16T14_22_11.823260", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T14-22-11.823260.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T14-22-11.823260.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_12_16T14_22_11.823260", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T14-22-11.823260.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T14-22-11.823260.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_12_16T14_22_11.823260", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T14-22-11.823260.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T14-22-11.823260.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_12_16T14_22_11.823260", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T14-22-11.823260.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T14-22-11.823260.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_12_16T14_22_11.823260", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T14-22-11.823260.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T14-22-11.823260.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_12_16T14_22_11.823260", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T14-22-11.823260.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T14-22-11.823260.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_12_16T14_22_11.823260", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T14-22-11.823260.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T14-22-11.823260.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_12_16T14_22_11.823260", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T14-22-11.823260.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T14-22-11.823260.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_12_16T14_22_11.823260", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T14-22-11.823260.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T14-22-11.823260.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_12_16T14_22_11.823260", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T14-22-11.823260.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T14-22-11.823260.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_12_16T14_22_11.823260", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T14-22-11.823260.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T14-22-11.823260.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_12_16T14_22_11.823260", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T14-22-11.823260.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T14-22-11.823260.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_12_16T14_22_11.823260", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-12-16T14-22-11.823260.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-12-16T14-22-11.823260.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_12_16T14_22_11.823260", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T14-22-11.823260.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T14-22-11.823260.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_12_16T14_22_11.823260", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-12-16T14-22-11.823260.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-12-16T14-22-11.823260.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_12_16T14_22_11.823260", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T14-22-11.823260.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T14-22-11.823260.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_12_16T14_22_11.823260", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T14-22-11.823260.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T14-22-11.823260.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_12_16T14_22_11.823260", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T14-22-11.823260.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T14-22-11.823260.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_12_16T14_22_11.823260", "path": ["**/details_harness|hendrycksTest-management|5_2023-12-16T14-22-11.823260.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-12-16T14-22-11.823260.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_12_16T14_22_11.823260", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-12-16T14-22-11.823260.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-12-16T14-22-11.823260.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_12_16T14_22_11.823260", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T14-22-11.823260.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T14-22-11.823260.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_12_16T14_22_11.823260", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T14-22-11.823260.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T14-22-11.823260.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_12_16T14_22_11.823260", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T14-22-11.823260.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T14-22-11.823260.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_12_16T14_22_11.823260", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T14-22-11.823260.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T14-22-11.823260.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_12_16T14_22_11.823260", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-12-16T14-22-11.823260.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-12-16T14-22-11.823260.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_12_16T14_22_11.823260", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-12-16T14-22-11.823260.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-12-16T14-22-11.823260.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_12_16T14_22_11.823260", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-12-16T14-22-11.823260.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-12-16T14-22-11.823260.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_12_16T14_22_11.823260", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T14-22-11.823260.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T14-22-11.823260.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_12_16T14_22_11.823260", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-12-16T14-22-11.823260.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2023-12-16T14-22-11.823260.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_12_16T14_22_11.823260", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T14-22-11.823260.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T14-22-11.823260.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_12_16T14_22_11.823260", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T14-22-11.823260.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T14-22-11.823260.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_12_16T14_22_11.823260", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-12-16T14-22-11.823260.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-12-16T14-22-11.823260.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_12_16T14_22_11.823260", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-12-16T14-22-11.823260.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-12-16T14-22-11.823260.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_12_16T14_22_11.823260", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-12-16T14-22-11.823260.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-12-16T14-22-11.823260.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_12_16T14_22_11.823260", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T14-22-11.823260.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T14-22-11.823260.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_12_16T14_22_11.823260", "path": ["**/details_harness|hendrycksTest-virology|5_2023-12-16T14-22-11.823260.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-12-16T14-22-11.823260.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_12_16T14_22_11.823260", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-12-16T14-22-11.823260.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-12-16T14-22-11.823260.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_12_16T14_22_11.823260", "path": ["**/details_harness|truthfulqa:mc|0_2023-12-16T14-22-11.823260.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-12-16T14-22-11.823260.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_12_16T14_22_11.823260", "path": ["**/details_harness|winogrande|5_2023-12-16T14-22-11.823260.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-12-16T14-22-11.823260.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_12_16T14_22_11.823260", "path": ["results_2023-12-16T14-22-11.823260.parquet"]}, {"split": "latest", "path": 
["results_2023-12-16T14-22-11.823260.parquet"]}]}]}
2023-12-16T14:25:46+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of fblgit/una-cybertron-7b-v3-OMA Dataset automatically created during the evaluation run of model fblgit/una-cybertron-7b-v3-OMA on the Open LLM Leaderboard. The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-12-16T14:22:11.823260(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
[ "# Dataset Card for Evaluation run of fblgit/una-cybertron-7b-v3-OMA\n\n\n\nDataset automatically created during the evaluation run of model fblgit/una-cybertron-7b-v3-OMA on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-12-16T14:22:11.823260(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of fblgit/una-cybertron-7b-v3-OMA\n\n\n\nDataset automatically created during the evaluation run of model fblgit/una-cybertron-7b-v3-OMA on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-12-16T14:22:11.823260(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 191, 67, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of fblgit/una-cybertron-7b-v3-OMA\n\n\n\nDataset automatically created during the evaluation run of model fblgit/una-cybertron-7b-v3-OMA on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-12-16T14:22:11.823260(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]## Dataset Card Authors [optional]" ]
9dbdd1c5d27791b9bd1816c9ccdfc1c3fb752ace
# Dataset Card for PELCRA benchmark corpora ## Table of Contents - [Table of Contents](#table-of-contents) - [Dataset Description](#dataset-description) - [Dataset Summary](#dataset-summary) - [Supported Tasks and Leaderboards](#supported-tasks-and-leaderboards) - [Languages](#languages) - [Dataset Structure](#dataset-structure) - [Data Instances](#data-instances) - [Data Fields](#data-fields) - [Data Splits](#data-splits) - [Dataset Creation](#dataset-creation) - [Curation Rationale](#curation-rationale) - [Source Data](#source-data) - [Annotations](#annotations) - [Personal and Sensitive Information](#personal-and-sensitive-information) - [Considerations for Using the Data](#considerations-for-using-the-data) - [Social Impact of Dataset](#social-impact-of-dataset) - [Discussion of Biases](#discussion-of-biases) - [Other Known Limitations](#other-known-limitations) - [Additional Information](#additional-information) - [Dataset Curators](#dataset-curators) - [Licensing Information](#licensing-information) - [Citation Information](#citation-information) - [Contributions](#contributions) ## Dataset Description - **Homepage:** https://huggingface.co/datasets/pelcra/pl-asr-bigos-pelcra-v2 - **Repository:** https://github.com/goodmike31/pl-asr-bigos-tools - **Paper:** https://aclanthology.org/L18-1678.pdf, https://annals-csis.org/proceedings/2023/drp/1609.html - **Leaderboard:** https://huggingface.co/spaces/michaljunczyk/pl-asr-bigos-benchmark - **Point of Contact:** [email protected], [email protected] ### Dataset Summary This repository contains subsets of the [SpokesMix](http://docs.pelcra.pl/doku.php?id=spokes_documentation), [SpokesBiz](http://docs.pelcra.pl/doku.php?id=spokesbiz) and [DiaBiz](http://docs.pelcra.pl/doku.php?id=diabiz) corpora, processed into the [BIGOS](https://huggingface.co/datasets/amu-cai/pl-asr-bigos-v2) (Benchmark Intended Grouping of Open Speech) format. The main contributions of these corpora are a) spontaneous and conversational speech and b) phone-based customer interactions. The inclusion of the subsets in the [BIGOS](https://huggingface.co/datasets/amu-cai/pl-asr-bigos-v2) corpora will hopefully result in the most comprehensive publicly available evaluation of Polish ASR systems in terms of number of speakers, devices and acoustic conditions. Further contributions to the benchmark datasets from the community are highly welcome. <br> ### Supported Tasks and Leaderboards The PELCRA test sets are intended for benchmarking Polish ASR systems under the 23/24 PolEval challenge. ### Languages Polish ## Dataset Structure The datasets consist of audio recordings in the WAV format with corresponding metadata.<br> The audio and metadata can be used in a raw format (TSV) or via the Hugging Face datasets library (see the loading sketch at the end of this card).<br> References for the test split will only become available after the completion of the 23/24 PolEval challenge.<br> ### Data Instances ### Data Fields Available fields: * audioname - file identifier * split - test, validation or train split * dataset - source dataset identifier * audio - binary representation of the audio file * ref_orig - original transcription of the audio file * samplingrate_orig - sampling rate of the original recording * sampling_rate - sampling rate of the recording in the release * audiopath_bigos - audio filepath after extraction of the tar.gz archive <br><br> ### Data Splits The train split contains recordings intended for training. The validation split contains recordings for validation during the training procedure. 
The test split contains recordings intended for evaluation only. References for the test split are not available until the completion of the 23/24 PolEval challenge. | Subset | train | validation | test | | --------------------- | ------ | ---------- | ----- | | ul-diabiz_poleval-22 | 7 719 | 284 | 947 | | ul-spokes_biz_bio-23 | 45 488 | 3 910 | 5 519 | | ul-spokes_biz_int-23 | 726 | 82 | 301 | | ul-spokes_biz_luz-23 | 32 539 | 5 528 | 3 899 | | ul-spokes_biz_pod-23 | 19 511 | 2 260 | 1 036 | | ul-spokes_biz_pres-23 | 14 165 | 1 780 | 1 229 | | ul-spokes_biz_vc-23 | 36 897 | 2 819 | 5 556 | | ul-spokes_biz_vc2-23 | 20 187 | 2 384 | 3 231 | | ul-spokes_biz_wyw-23 | 9 711 | 1 501 | 145 | | ul-spokes_mix_emo-18 | 20 186 | 3 147 | 996 | | ul-spokes_mix_luz-18 | 14 840 | 4 324 | 1 755 | | ul-spokes_mix_parl-18 | 7 181 | 513 | 962 | ## Dataset Creation ### Curation Rationale The dataset was curated in order to enable convenient use of training, validation and test corpora across all publicly available ASR speech datasets for Polish. Thanks to the similar format of the PELCRA and BIGOS corpora and convenient access via the Hugging Face platform, ASR practitioners can leverage a large and diverse set of recordings with reduced overhead. Recordings which either lacked transcriptions or were too short to be useful for training or evaluation were removed during curation. ### Source Data Data was sourced from the original datasets below with the original authors' permission: * [SpokesMix](http://docs.pelcra.pl/doku.php?id=spoken_offline_corpora) corpus * [SpokesBiz](http://docs.pelcra.pl/doku.php?id=spokesbiz) corpus * [PolEval DiaBiz sample](http://docs.pelcra.pl/doku.php?id=diabiz) corpus #### Initial Data Collection and Normalization Source text and audio files were extracted and encoded in a unified format.<br> Original dataset-specific transcription norms are preserved, including punctuation and casing. <br> #### Who are the source language producers? The PELCRA SpokesMix, SpokesBiz and DiaBiz datasets were created by:<br> Piotr Pęzik, Michał Adamczyk, Małgorzata Krawentek, Paweł Wilk, Sylwia Karasińska, Angelika Peljak-Łapińska, Karolina Adamczyk, Monika Garnys, Karolina Walkusz, Anna Cichosz, Anna Kwiatkowska, Mikołaj Deckert, Paulina Rybińska, Izabela Grabarczyk, Maciej Grabski, Karol Ługowski, Michał Koźmiński, Zuzanna Deckert, Piotr Górniak, Konrad Kaczyński, Łukasz Jałowiecki and others. <br> The curation and inclusion of subsets of these corpora into the BIGOS format was done by Michał Junczyk from Adam Mickiewicz University. ### Annotations #### Annotation process The current release contains the original, manually created transcriptions. #### Who are the annotators? Participants of the original projects. ### Personal and Sensitive Information This corpus does not contain PII or sensitive information. All speaker IDs are anonymized. ## Considerations for Using the Data ### Social Impact of Dataset To be updated. ### Discussion of Biases To be updated. ### Other Known Limitations Some metadata available in the original releases is not available in the initial HF release, e.g. speaker age, sex, etc. ## Additional Information ### Dataset Curators Original authors of the source datasets: Piotr Pęzik ([email protected]) et al. Please refer to [source-data](#source-data) for details. Michał Junczyk ([email protected]) - curator of the PELCRA corpora in the BIGOS format. 
### Licensing Information The BIGOS benchmark is available under the [Creative Commons Attribution Non-Commercial No-Derivatives](https://creativecommons.org/licenses/by-nc-nd/4.0/deed.en) license. ### Citation Information Please cite all papers listed on the original authors' pages.<br> * [SpokesMix](https://aclanthology.org/L18-1678) corpus * [SpokesBiz](http://docs.pelcra.pl/doku.php?id=spokesbiz) corpus * [PolEval DiaBiz sample](https://aclanthology.org/2022.lrec-1.76/) corpus ### Contributions Thanks to [@goodmike31](https://github.com/goodmike31) for adding this dataset.
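As mentioned in the Dataset Structure section, the corpora can be loaded via the Hugging Face datasets library. The snippet below is a minimal, illustrative sketch rather than an official recipe: the repository id `pelcra/pl-asr-pelcra-for-bigos` and the field names come from this card, while the configuration name `ul-diabiz_poleval-22` is taken from the splits table and is an assumption about how the subsets are exposed. Accepting the gated license terms on the Hub is required first.

```python
from datasets import load_dataset

# Minimal sketch: load one PELCRA-BIGOS subset.
# The config name is taken from the splits table above and may need adjusting;
# the dataset is gated, so the license terms must be accepted on the Hub beforehand.
ds = load_dataset("pelcra/pl-asr-pelcra-for-bigos", "ul-diabiz_poleval-22", split="validation")

# Inspect a single example using the fields documented under "Data Fields".
example = ds[0]
print(example["audioname"])       # file identifier
print(example["ref_orig"])        # original transcription of the audio file
print(example["sampling_rate"])   # sampling rate of the released recording
audio = example["audio"]          # audio payload as described in "Data Fields"
```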
pelcra/pl-asr-pelcra-for-bigos
[ "task_categories:automatic-speech-recognition", "annotations_creators:expert-generated", "language_creators:expert-generated", "multilinguality:monolingual", "size_categories:10K<n<100K", "source_datasets:original", "language:pl", "license:cc-by-nc-nd-4.0", "benchmark", "polish", "asr", "speech", "clarin", "pelcra", "region:us" ]
2023-12-16T14:37:08+00:00
{"annotations_creators": ["expert-generated"], "language_creators": ["expert-generated"], "language": ["pl"], "license": ["cc-by-nc-nd-4.0"], "multilinguality": ["monolingual"], "size_categories": ["10K<n<100K"], "source_datasets": ["original"], "task_categories": ["automatic-speech-recognition"], "task_ids": [], "pretty_name": "pl-asr-pelcra-for-bigos", "tags": ["benchmark", "polish", "asr", "speech", "clarin", "pelcra"], "extra_gated_prompt": "This is curated version of Spokes Mix, Spokes Biz and Diabiz Poleval subset corpora. Please get familiar with the specific terms of usage and make sure you understood and agreed to them before use. Below are the links to the license terms and datasets the specific license type applies to:\n* [Creative Commons By Attribution Non Commercial No Derivates](https://creativecommons.org/licenses/by-nc-nd/4.0/deed.en) which applies to [SpokesMix](http://spokes.clarin-pl.eu/) and [SpokesBiz](http://docs.pelcra.pl/doku.php?id=spokesbiz\") corpora.\n* Public domain license which applies to [PolEval DiaBiz sample](http://poleval.pl/tasks/task1) corpus.", "extra_gated_fields": {"I hereby confirm that I have read and accepted the license terms of datasets comprising aforementioned corpora": "checkbox"}}
2024-02-03T22:07:58+00:00
[]
[ "pl" ]
TAGS #task_categories-automatic-speech-recognition #annotations_creators-expert-generated #language_creators-expert-generated #multilinguality-monolingual #size_categories-10K<n<100K #source_datasets-original #language-Polish #license-cc-by-nc-nd-4.0 #benchmark #polish #asr #speech #clarin #pelcra #region-us
Dataset Card for PELCRA benchmark corpora ========================================= Table of Contents ----------------- * Table of Contents * Dataset Description + Dataset Summary + Supported Tasks and Leaderboards + Languages * Dataset Structure + Data Instances + Data Fields + Data Splits * Dataset Creation + Curation Rationale + Source Data + Annotations + Personal and Sensitive Information * Considerations for Using the Data + Social Impact of Dataset + Discussion of Biases + Other Known Limitations * Additional Information + Dataset Curators + Licensing Information + Citation Information + Contributions Dataset Description ------------------- * Homepage: URL * Repository: URL * Paper: URL URL * Leaderboard: URL * Point of Contact: michal.junczyk@URL, URL@URL ### Dataset Summary This repository contains subsets of the SpokesMix (SpokesMix, SpokesBiz and Diabiz) corpora processed in the format of the BIGOS (Benchmark Intended Grouping of Open Speech) format. The main contribution of these corpora are a) spontaneous and conversional speech and b) phone-based customer interactions. The inclusion of the subsets in the BIGOS corpora, will hopefully result in the most comprehensive publicly available evaluation of Polish ASR systems in terms of number of speakers, devices and acoustic conditions. Further contributions to the benchmark datasets from the community are highly welcomed. ### Supported Tasks and Leaderboards The PELCRA test sets are intented for benchmarking Polish ASR systems under the 23/24 PolEval challenge. ### Languages Polish Dataset Structure ----------------- The datasets consist of audio recordings in the WAV format with corresponding metadata. The audio and metadata can be used in a raw format (TSV) or via the hugging face datasets library. References for the test split will only become available after the completion of the 23/24 PolEval challenge. ### Data Instances ### Data Fields Available fields: * audioname - file identifier * split - test, validation or train split * dataset - source dataset identifier * audio - binary representation of audio file * ref\_orig - original transcription of audio file * samplingrate\_orig - sampling rate of the original recording * sampling\_rate - sampling rate of recording in the release * audiopath\_bigos - audio filepath after extraction of URL archive ### Data Splits Train split contains recordings intendend for training. Validation split contains recordings for validation during training procedure. Test split contains recordings intended for evaluation only. References for test split are not available until the completion of 23/24 PolEval challenge. Dataset Creation ---------------- ### Curation Rationale The dataset was curated in order to enable convenient use of training, validation and test corpora across all publically available ASR speech datasets for Polish. Thanks to similar format of PELCRA and BIGOS corpora and convenient access via hugging face platform ASR practioners are able to leverage vast amount of diverse set of recordings with reduced overhead. Recordings which either lacked transcriptions or were too short to be useful for training or evaluation were removed during curation. ### Source Data Data was sourced from the below original datasets with the original authors' permission: * SpokesMix corpus * SpokesBiz corpus. * PolEval DiaBiz sample corpus. #### Initial Data Collection and Normalization Source text and audio files were extracted and encoded in a unified format. 
Original dataset-specific transcription norms are preserved, including punctuation and casing. #### Who are the source language producers? The PELCRA SpokesMix, SpokesBiz and DiaBiz datasets were created by: Piotr Pęzik, Michał Adamczyk, Małgorzata Krawentek, Paweł Wilk, Sylwia Karasińska, Angelika Peljak-Łapińska, Karolina Adamczyk, Monika Garnys, Karolina Walkusz, Anna Cichosz, Anna Kwiatkowska, Mikołaj Deckert, Paulina Rybińska, Izabela Grabarczyk, Maciej Grabski, Karol Ługowski, Michał Koźmiński, Zuzanna Deckert, Piotr Górniak, Konrad Kaczyński, Łukasz Jałowiecki and others. The curation and inclusion of subsets of these corpora into the BIGOS format was done by Michał Junczyk from Adam Mickiewicz University. ### Annotations #### Annotation process Current release contains original, manually performed transcriptions. #### Who are the annotators? Participants of the original projects. ### Personal and Sensitive Information This corpus does not contain PII or Sensitive Information. All IDs of speakers are anonymized. Considerations for Using the Data --------------------------------- ### Social Impact of Dataset To be updated. ### Discussion of Biases To be updated. ### Other Known Limitations Some metadata avaiable in the original releases is not available in the initial HF release e.g. speaker age, sex etc. Additional Information ---------------------- ### Dataset Curators Original authors of the source datasets: Piotr Pęzik (URL@URL) et al. please refer to source-data for details. Michał Junczyk (michal.junczyk@URL) - curator of PELCRA corpora in the BIGOS format. ### Licensing Information The BIGOS benchmark is available under Creative Commons By Attribution Non Commercial No Derivates Please cite all papers enlisted on the original authors pages. * SpokesMix corpus * SpokesBiz corpus. * PolEval DiaBiz sample corpus. ### Contributions Thanks to @goodmike31 for adding this dataset.
[ "### Dataset Summary\n\n\nThis repository contains subsets of the SpokesMix (SpokesMix, SpokesBiz and Diabiz) corpora\nprocessed in the format of the BIGOS (Benchmark Intended Grouping of Open Speech) format.\nThe main contribution of these corpora are a) spontaneous and conversional speech and b) phone-based customer interactions.\nThe inclusion of the subsets in the BIGOS corpora, will hopefully result in\nthe most comprehensive publicly available evaluation of Polish ASR systems in terms of number of speakers, devices and acoustic conditions.\nFurther contributions to the benchmark datasets from the community are highly welcomed.", "### Supported Tasks and Leaderboards\n\n\nThe PELCRA test sets are intented for benchmarking Polish ASR systems under the 23/24 PolEval challenge.", "### Languages\n\n\nPolish\n\n\nDataset Structure\n-----------------\n\n\nThe datasets consist of audio recordings in the WAV format with corresponding metadata. \n\nThe audio and metadata can be used in a raw format (TSV) or via the hugging face datasets library. \n\nReferences for the test split will only become available after the completion of the 23/24 PolEval challenge.", "### Data Instances", "### Data Fields\n\n\nAvailable fields:\n\n\n* audioname - file identifier\n* split - test, validation or train split\n* dataset - source dataset identifier\n* audio - binary representation of audio file\n* ref\\_orig - original transcription of audio file\n* samplingrate\\_orig - sampling rate of the original recording\n* sampling\\_rate - sampling rate of recording in the release\n* audiopath\\_bigos - audio filepath after extraction of URL archive", "### Data Splits\n\n\nTrain split contains recordings intendend for training.\nValidation split contains recordings for validation during training procedure.\nTest split contains recordings intended for evaluation only.\nReferences for test split are not available until the completion of 23/24 PolEval challenge.\n\n\n\nDataset Creation\n----------------", "### Curation Rationale\n\n\nThe dataset was curated in order to enable convenient use of training, validation and test corpora across all publically available ASR speech datasets for Polish. Thanks to similar format of PELCRA and BIGOS corpora and convenient access via hugging face platform ASR practioners are able to leverage vast amount of diverse set of recordings with reduced overhead.\nRecordings which either lacked transcriptions or were too short to be useful for training or evaluation were removed during curation.", "### Source Data\n\n\nData was sourced from the below original datasets with the original authors' permission:\n\n\n* SpokesMix corpus\n* SpokesBiz corpus.\n* PolEval DiaBiz sample corpus.", "#### Initial Data Collection and Normalization\n\n\nSource text and audio files were extracted and encoded in a unified format. \n\nOriginal dataset-specific transcription norms are preserved, including punctuation and casing.", "#### Who are the source language producers?\n\n\nThe PELCRA SpokesMix, SpokesBiz and DiaBiz datasets were created by: \n\nPiotr Pęzik, Michał Adamczyk, Małgorzata Krawentek, Paweł Wilk, Sylwia Karasińska, Angelika Peljak-Łapińska, Karolina Adamczyk, Monika Garnys, Karolina Walkusz, Anna Cichosz, Anna Kwiatkowska, Mikołaj Deckert, Paulina Rybińska, Izabela Grabarczyk, Maciej Grabski, Karol Ługowski, Michał Koźmiński, Zuzanna Deckert, Piotr Górniak, Konrad Kaczyński, Łukasz Jałowiecki and others. 
\n\n\n\nThe curation and inclusion of subsets of these corpora into the BIGOS format was done by Michał Junczyk from Adam Mickiewicz University.", "### Annotations", "#### Annotation process\n\n\nCurrent release contains original, manually performed transcriptions.", "#### Who are the annotators?\n\n\nParticipants of the original projects.", "### Personal and Sensitive Information\n\n\nThis corpus does not contain PII or Sensitive Information.\nAll IDs of speakers are anonymized.\n\n\nConsiderations for Using the Data\n---------------------------------", "### Social Impact of Dataset\n\n\nTo be updated.", "### Discussion of Biases\n\n\nTo be updated.", "### Other Known Limitations\n\n\nSome metadata avaiable in the original releases is not available in the initial HF release e.g. speaker age, sex etc.\n\n\nAdditional Information\n----------------------", "### Dataset Curators\n\n\nOriginal authors of the source datasets: Piotr Pęzik (URL@URL) et al. please refer to source-data for details.\nMichał Junczyk (michal.junczyk@URL) - curator of PELCRA corpora in the BIGOS format.", "### Licensing Information\n\n\nThe BIGOS benchmark is available under Creative Commons By Attribution Non Commercial No Derivates\n\n\nPlease cite all papers enlisted on the original authors pages. \n\n\n\n* SpokesMix corpus\n* SpokesBiz corpus.\n* PolEval DiaBiz sample corpus.", "### Contributions\n\n\nThanks to @goodmike31 for adding this dataset." ]
[ "TAGS\n#task_categories-automatic-speech-recognition #annotations_creators-expert-generated #language_creators-expert-generated #multilinguality-monolingual #size_categories-10K<n<100K #source_datasets-original #language-Polish #license-cc-by-nc-nd-4.0 #benchmark #polish #asr #speech #clarin #pelcra #region-us \n", "### Dataset Summary\n\n\nThis repository contains subsets of the SpokesMix (SpokesMix, SpokesBiz and Diabiz) corpora\nprocessed in the format of the BIGOS (Benchmark Intended Grouping of Open Speech) format.\nThe main contribution of these corpora are a) spontaneous and conversional speech and b) phone-based customer interactions.\nThe inclusion of the subsets in the BIGOS corpora, will hopefully result in\nthe most comprehensive publicly available evaluation of Polish ASR systems in terms of number of speakers, devices and acoustic conditions.\nFurther contributions to the benchmark datasets from the community are highly welcomed.", "### Supported Tasks and Leaderboards\n\n\nThe PELCRA test sets are intented for benchmarking Polish ASR systems under the 23/24 PolEval challenge.", "### Languages\n\n\nPolish\n\n\nDataset Structure\n-----------------\n\n\nThe datasets consist of audio recordings in the WAV format with corresponding metadata. \n\nThe audio and metadata can be used in a raw format (TSV) or via the hugging face datasets library. \n\nReferences for the test split will only become available after the completion of the 23/24 PolEval challenge.", "### Data Instances", "### Data Fields\n\n\nAvailable fields:\n\n\n* audioname - file identifier\n* split - test, validation or train split\n* dataset - source dataset identifier\n* audio - binary representation of audio file\n* ref\\_orig - original transcription of audio file\n* samplingrate\\_orig - sampling rate of the original recording\n* sampling\\_rate - sampling rate of recording in the release\n* audiopath\\_bigos - audio filepath after extraction of URL archive", "### Data Splits\n\n\nTrain split contains recordings intendend for training.\nValidation split contains recordings for validation during training procedure.\nTest split contains recordings intended for evaluation only.\nReferences for test split are not available until the completion of 23/24 PolEval challenge.\n\n\n\nDataset Creation\n----------------", "### Curation Rationale\n\n\nThe dataset was curated in order to enable convenient use of training, validation and test corpora across all publically available ASR speech datasets for Polish. Thanks to similar format of PELCRA and BIGOS corpora and convenient access via hugging face platform ASR practioners are able to leverage vast amount of diverse set of recordings with reduced overhead.\nRecordings which either lacked transcriptions or were too short to be useful for training or evaluation were removed during curation.", "### Source Data\n\n\nData was sourced from the below original datasets with the original authors' permission:\n\n\n* SpokesMix corpus\n* SpokesBiz corpus.\n* PolEval DiaBiz sample corpus.", "#### Initial Data Collection and Normalization\n\n\nSource text and audio files were extracted and encoded in a unified format. 
\n\nOriginal dataset-specific transcription norms are preserved, including punctuation and casing.", "#### Who are the source language producers?\n\n\nThe PELCRA SpokesMix, SpokesBiz and DiaBiz datasets were created by: \n\nPiotr Pęzik, Michał Adamczyk, Małgorzata Krawentek, Paweł Wilk, Sylwia Karasińska, Angelika Peljak-Łapińska, Karolina Adamczyk, Monika Garnys, Karolina Walkusz, Anna Cichosz, Anna Kwiatkowska, Mikołaj Deckert, Paulina Rybińska, Izabela Grabarczyk, Maciej Grabski, Karol Ługowski, Michał Koźmiński, Zuzanna Deckert, Piotr Górniak, Konrad Kaczyński, Łukasz Jałowiecki and others. \n\n\n\nThe curation and inclusion of subsets of these corpora into the BIGOS format was done by Michał Junczyk from Adam Mickiewicz University.", "### Annotations", "#### Annotation process\n\n\nCurrent release contains original, manually performed transcriptions.", "#### Who are the annotators?\n\n\nParticipants of the original projects.", "### Personal and Sensitive Information\n\n\nThis corpus does not contain PII or Sensitive Information.\nAll IDs of speakers are anonymized.\n\n\nConsiderations for Using the Data\n---------------------------------", "### Social Impact of Dataset\n\n\nTo be updated.", "### Discussion of Biases\n\n\nTo be updated.", "### Other Known Limitations\n\n\nSome metadata avaiable in the original releases is not available in the initial HF release e.g. speaker age, sex etc.\n\n\nAdditional Information\n----------------------", "### Dataset Curators\n\n\nOriginal authors of the source datasets: Piotr Pęzik (URL@URL) et al. please refer to source-data for details.\nMichał Junczyk (michal.junczyk@URL) - curator of PELCRA corpora in the BIGOS format.", "### Licensing Information\n\n\nThe BIGOS benchmark is available under Creative Commons By Attribution Non Commercial No Derivates\n\n\nPlease cite all papers enlisted on the original authors pages. \n\n\n\n* SpokesMix corpus\n* SpokesBiz corpus.\n* PolEval DiaBiz sample corpus.", "### Contributions\n\n\nThanks to @goodmike31 for adding this dataset." ]
[ 111, 149, 37, 84, 6, 113, 68, 113, 44, 50, 180, 5, 19, 16, 41, 11, 12, 44, 63, 61, 18 ]
[ "passage: TAGS\n#task_categories-automatic-speech-recognition #annotations_creators-expert-generated #language_creators-expert-generated #multilinguality-monolingual #size_categories-10K<n<100K #source_datasets-original #language-Polish #license-cc-by-nc-nd-4.0 #benchmark #polish #asr #speech #clarin #pelcra #region-us \n### Dataset Summary\n\n\nThis repository contains subsets of the SpokesMix (SpokesMix, SpokesBiz and Diabiz) corpora\nprocessed in the format of the BIGOS (Benchmark Intended Grouping of Open Speech) format.\nThe main contribution of these corpora are a) spontaneous and conversional speech and b) phone-based customer interactions.\nThe inclusion of the subsets in the BIGOS corpora, will hopefully result in\nthe most comprehensive publicly available evaluation of Polish ASR systems in terms of number of speakers, devices and acoustic conditions.\nFurther contributions to the benchmark datasets from the community are highly welcomed.### Supported Tasks and Leaderboards\n\n\nThe PELCRA test sets are intented for benchmarking Polish ASR systems under the 23/24 PolEval challenge.### Languages\n\n\nPolish\n\n\nDataset Structure\n-----------------\n\n\nThe datasets consist of audio recordings in the WAV format with corresponding metadata. \n\nThe audio and metadata can be used in a raw format (TSV) or via the hugging face datasets library. \n\nReferences for the test split will only become available after the completion of the 23/24 PolEval challenge.### Data Instances### Data Fields\n\n\nAvailable fields:\n\n\n* audioname - file identifier\n* split - test, validation or train split\n* dataset - source dataset identifier\n* audio - binary representation of audio file\n* ref\\_orig - original transcription of audio file\n* samplingrate\\_orig - sampling rate of the original recording\n* sampling\\_rate - sampling rate of recording in the release\n* audiopath\\_bigos - audio filepath after extraction of URL archive", "passage: ### Data Splits\n\n\nTrain split contains recordings intendend for training.\nValidation split contains recordings for validation during training procedure.\nTest split contains recordings intended for evaluation only.\nReferences for test split are not available until the completion of 23/24 PolEval challenge.\n\n\n\nDataset Creation\n----------------### Curation Rationale\n\n\nThe dataset was curated in order to enable convenient use of training, validation and test corpora across all publically available ASR speech datasets for Polish. Thanks to similar format of PELCRA and BIGOS corpora and convenient access via hugging face platform ASR practioners are able to leverage vast amount of diverse set of recordings with reduced overhead.\nRecordings which either lacked transcriptions or were too short to be useful for training or evaluation were removed during curation.### Source Data\n\n\nData was sourced from the below original datasets with the original authors' permission:\n\n\n* SpokesMix corpus\n* SpokesBiz corpus.\n* PolEval DiaBiz sample corpus.#### Initial Data Collection and Normalization\n\n\nSource text and audio files were extracted and encoded in a unified format. 
\n\nOriginal dataset-specific transcription norms are preserved, including punctuation and casing.#### Who are the source language producers?\n\n\nThe PELCRA SpokesMix, SpokesBiz and DiaBiz datasets were created by: \n\nPiotr Pęzik, Michał Adamczyk, Małgorzata Krawentek, Paweł Wilk, Sylwia Karasińska, Angelika Peljak-Łapińska, Karolina Adamczyk, Monika Garnys, Karolina Walkusz, Anna Cichosz, Anna Kwiatkowska, Mikołaj Deckert, Paulina Rybińska, Izabela Grabarczyk, Maciej Grabski, Karol Ługowski, Michał Koźmiński, Zuzanna Deckert, Piotr Górniak, Konrad Kaczyński, Łukasz Jałowiecki and others. \n\n\n\nThe curation and inclusion of subsets of these corpora into the BIGOS format was done by Michał Junczyk from Adam Mickiewicz University.### Annotations#### Annotation process\n\n\nCurrent release contains original, manually performed transcriptions.#### Who are the annotators?\n\n\nParticipants of the original projects.### Personal and Sensitive Information\n\n\nThis corpus does not contain PII or Sensitive Information.\nAll IDs of speakers are anonymized.\n\n\nConsiderations for Using the Data\n---------------------------------### Social Impact of Dataset\n\n\nTo be updated.### Discussion of Biases\n\n\nTo be updated." ]
c69c10a97238fc231a2b7a9b5cbefa1af42f5fbd
# Dataset Card for Evaluation run of luffycodes/vicuna-class-shishya-7b-ep3 <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [luffycodes/vicuna-class-shishya-7b-ep3](https://huggingface.co/luffycodes/vicuna-class-shishya-7b-ep3) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_luffycodes__vicuna-class-shishya-7b-ep3", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-12-16T14:35:39.279406](https://huggingface.co/datasets/open-llm-leaderboard/details_luffycodes__vicuna-class-shishya-7b-ep3/blob/main/results_2023-12-16T14-35-39.279406.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.5021536694384112, "acc_stderr": 0.03408136345498624, "acc_norm": 0.5103315759128656, "acc_norm_stderr": 0.03501169960455142, "mc1": 0.24479804161566707, "mc1_stderr": 0.015051869486715004, "mc2": 0.36866124332509587, "mc2_stderr": 0.014406507957340794 }, "harness|arc:challenge|25": { "acc": 0.39505119453924914, "acc_stderr": 0.014285898292938172, "acc_norm": 0.4061433447098976, "acc_norm_stderr": 0.01435165669009786 }, "harness|hellaswag|10": { "acc": 0.58105954989046, "acc_stderr": 0.004923772581848499, "acc_norm": 0.7671778530173272, "acc_norm_stderr": 0.004217661194937994 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.26, "acc_stderr": 0.04408440022768081, "acc_norm": 0.26, "acc_norm_stderr": 0.04408440022768081 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.4444444444444444, "acc_stderr": 0.04292596718256981, "acc_norm": 0.4444444444444444, "acc_norm_stderr": 0.04292596718256981 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.5, "acc_stderr": 0.04068942293855797, "acc_norm": 0.5, "acc_norm_stderr": 0.04068942293855797 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.51, "acc_stderr": 0.05024183937956912, "acc_norm": 0.51, "acc_norm_stderr": 0.05024183937956912 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.5547169811320755, "acc_stderr": 0.030588052974270655, "acc_norm": 0.5547169811320755, "acc_norm_stderr": 0.030588052974270655 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.5, "acc_stderr": 0.04181210050035455, "acc_norm": 0.5, "acc_norm_stderr": 0.04181210050035455 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.35, "acc_stderr": 0.04793724854411019, "acc_norm": 0.35, "acc_norm_stderr": 0.04793724854411019 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.42, "acc_stderr": 0.049604496374885836, "acc_norm": 0.42, "acc_norm_stderr": 0.049604496374885836 }, 
"harness|hendrycksTest-college_mathematics|5": { "acc": 0.35, "acc_stderr": 0.0479372485441102, "acc_norm": 0.35, "acc_norm_stderr": 0.0479372485441102 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.4508670520231214, "acc_stderr": 0.0379401267469703, "acc_norm": 0.4508670520231214, "acc_norm_stderr": 0.0379401267469703 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.19607843137254902, "acc_stderr": 0.03950581861179962, "acc_norm": 0.19607843137254902, "acc_norm_stderr": 0.03950581861179962 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.63, "acc_stderr": 0.04852365870939099, "acc_norm": 0.63, "acc_norm_stderr": 0.04852365870939099 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.43829787234042555, "acc_stderr": 0.03243618636108101, "acc_norm": 0.43829787234042555, "acc_norm_stderr": 0.03243618636108101 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.3333333333333333, "acc_stderr": 0.04434600701584925, "acc_norm": 0.3333333333333333, "acc_norm_stderr": 0.04434600701584925 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.4896551724137931, "acc_stderr": 0.041657747757287644, "acc_norm": 0.4896551724137931, "acc_norm_stderr": 0.041657747757287644 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.3148148148148148, "acc_stderr": 0.023919984164047732, "acc_norm": 0.3148148148148148, "acc_norm_stderr": 0.023919984164047732 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.3253968253968254, "acc_stderr": 0.04190596438871136, "acc_norm": 0.3253968253968254, "acc_norm_stderr": 0.04190596438871136 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.34, "acc_stderr": 0.04760952285695235, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695235 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.5838709677419355, "acc_stderr": 0.028040981380761547, "acc_norm": 0.5838709677419355, "acc_norm_stderr": 0.028040981380761547 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.41379310344827586, "acc_stderr": 0.03465304488406795, "acc_norm": 0.41379310344827586, "acc_norm_stderr": 0.03465304488406795 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.43, "acc_stderr": 0.04975698519562428, "acc_norm": 0.43, "acc_norm_stderr": 0.04975698519562428 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.6484848484848484, "acc_stderr": 0.037282069986826503, "acc_norm": 0.6484848484848484, "acc_norm_stderr": 0.037282069986826503 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.601010101010101, "acc_stderr": 0.03488901616852732, "acc_norm": 0.601010101010101, "acc_norm_stderr": 0.03488901616852732 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.7409326424870466, "acc_stderr": 0.03161877917935412, "acc_norm": 0.7409326424870466, "acc_norm_stderr": 0.03161877917935412 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.5025641025641026, "acc_stderr": 0.025350672979412195, "acc_norm": 0.5025641025641026, "acc_norm_stderr": 0.025350672979412195 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.2740740740740741, "acc_stderr": 0.027195934804085626, "acc_norm": 0.2740740740740741, "acc_norm_stderr": 0.027195934804085626 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.4789915966386555, "acc_stderr": 0.03244980849990029, "acc_norm": 0.4789915966386555, "acc_norm_stderr": 0.03244980849990029 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.3509933774834437, "acc_stderr": 
0.03896981964257375, "acc_norm": 0.3509933774834437, "acc_norm_stderr": 0.03896981964257375 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.7064220183486238, "acc_stderr": 0.019525151122639667, "acc_norm": 0.7064220183486238, "acc_norm_stderr": 0.019525151122639667 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.3888888888888889, "acc_stderr": 0.03324708911809117, "acc_norm": 0.3888888888888889, "acc_norm_stderr": 0.03324708911809117 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.696078431372549, "acc_stderr": 0.032282103870378914, "acc_norm": 0.696078431372549, "acc_norm_stderr": 0.032282103870378914 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.70042194092827, "acc_stderr": 0.029818024749753095, "acc_norm": 0.70042194092827, "acc_norm_stderr": 0.029818024749753095 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.5874439461883408, "acc_stderr": 0.03304062175449297, "acc_norm": 0.5874439461883408, "acc_norm_stderr": 0.03304062175449297 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.6335877862595419, "acc_stderr": 0.04225875451969637, "acc_norm": 0.6335877862595419, "acc_norm_stderr": 0.04225875451969637 }, "harness|hendrycksTest-international_law|5": { "acc": 0.5950413223140496, "acc_stderr": 0.04481137755942469, "acc_norm": 0.5950413223140496, "acc_norm_stderr": 0.04481137755942469 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.5462962962962963, "acc_stderr": 0.04812917324536823, "acc_norm": 0.5462962962962963, "acc_norm_stderr": 0.04812917324536823 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.50920245398773, "acc_stderr": 0.03927705600787443, "acc_norm": 0.50920245398773, "acc_norm_stderr": 0.03927705600787443 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.38392857142857145, "acc_stderr": 0.04616143075028547, "acc_norm": 0.38392857142857145, "acc_norm_stderr": 0.04616143075028547 }, "harness|hendrycksTest-management|5": { "acc": 0.6699029126213593, "acc_stderr": 0.0465614711001235, "acc_norm": 0.6699029126213593, "acc_norm_stderr": 0.0465614711001235 }, "harness|hendrycksTest-marketing|5": { "acc": 0.782051282051282, "acc_stderr": 0.027046857630716677, "acc_norm": 0.782051282051282, "acc_norm_stderr": 0.027046857630716677 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.55, "acc_stderr": 0.05, "acc_norm": 0.55, "acc_norm_stderr": 0.05 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.6641123882503193, "acc_stderr": 0.016889407235171686, "acc_norm": 0.6641123882503193, "acc_norm_stderr": 0.016889407235171686 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.5664739884393064, "acc_stderr": 0.026680134761679217, "acc_norm": 0.5664739884393064, "acc_norm_stderr": 0.026680134761679217 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.26256983240223464, "acc_stderr": 0.014716824273017761, "acc_norm": 0.26256983240223464, "acc_norm_stderr": 0.014716824273017761 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.565359477124183, "acc_stderr": 0.028384256704883037, "acc_norm": 0.565359477124183, "acc_norm_stderr": 0.028384256704883037 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.6045016077170418, "acc_stderr": 0.027770918531427838, "acc_norm": 0.6045016077170418, "acc_norm_stderr": 0.027770918531427838 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.5648148148148148, "acc_stderr": 0.027586006221607708, "acc_norm": 0.5648148148148148, "acc_norm_stderr": 0.027586006221607708 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 
0.3723404255319149, "acc_stderr": 0.028838921471251458, "acc_norm": 0.3723404255319149, "acc_norm_stderr": 0.028838921471251458 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.3728813559322034, "acc_stderr": 0.01235063005833336, "acc_norm": 0.3728813559322034, "acc_norm_stderr": 0.01235063005833336 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.5183823529411765, "acc_stderr": 0.030352303395351964, "acc_norm": 0.5183823529411765, "acc_norm_stderr": 0.030352303395351964 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.4869281045751634, "acc_stderr": 0.020220920829626916, "acc_norm": 0.4869281045751634, "acc_norm_stderr": 0.020220920829626916 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6, "acc_stderr": 0.0469237132203465, "acc_norm": 0.6, "acc_norm_stderr": 0.0469237132203465 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.6285714285714286, "acc_stderr": 0.03093285879278984, "acc_norm": 0.6285714285714286, "acc_norm_stderr": 0.03093285879278984 }, "harness|hendrycksTest-sociology|5": { "acc": 0.681592039800995, "acc_stderr": 0.03294118479054095, "acc_norm": 0.681592039800995, "acc_norm_stderr": 0.03294118479054095 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.72, "acc_stderr": 0.045126085985421276, "acc_norm": 0.72, "acc_norm_stderr": 0.045126085985421276 }, "harness|hendrycksTest-virology|5": { "acc": 0.39759036144578314, "acc_stderr": 0.038099730845402184, "acc_norm": 0.39759036144578314, "acc_norm_stderr": 0.038099730845402184 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.7485380116959064, "acc_stderr": 0.033275044238468436, "acc_norm": 0.7485380116959064, "acc_norm_stderr": 0.033275044238468436 }, "harness|truthfulqa:mc|0": { "mc1": 0.24479804161566707, "mc1_stderr": 0.015051869486715004, "mc2": 0.36866124332509587, "mc2_stderr": 0.014406507957340794 }, "harness|winogrande|5": { "acc": 0.7190213101815311, "acc_stderr": 0.012632541095875824 }, "harness|gsm8k|5": { "acc": 0.0, "acc_stderr": 0.0 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). 
--> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
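In addition to the per-task loading example near the top of this card, the aggregated scores described above are stored in the "results" configuration. Below is a minimal sketch for reading them; it assumes the same configuration and split layout (a "latest" split pointing to the newest results file) as the other evaluation-run datasets in this collection.

```python
from datasets import load_dataset

# Minimal sketch: read the aggregated metrics of the most recent run.
# Assumes the "results" configuration and a "latest" split, following the
# layout used by the other evaluation-run datasets in this collection.
results = load_dataset(
    "open-llm-leaderboard/details_luffycodes__vicuna-class-shishya-7b-ep3",
    "results",
    split="latest",
)
print(results[0])  # one row containing the aggregated results of the latest run
```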
open-llm-leaderboard/details_luffycodes__vicuna-class-shishya-7b-ep3
[ "region:us" ]
2023-12-16T14:38:34+00:00
{"pretty_name": "Evaluation run of luffycodes/vicuna-class-shishya-7b-ep3", "dataset_summary": "Dataset automatically created during the evaluation run of model [luffycodes/vicuna-class-shishya-7b-ep3](https://huggingface.co/luffycodes/vicuna-class-shishya-7b-ep3) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_luffycodes__vicuna-class-shishya-7b-ep3\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-12-16T14:35:39.279406](https://huggingface.co/datasets/open-llm-leaderboard/details_luffycodes__vicuna-class-shishya-7b-ep3/blob/main/results_2023-12-16T14-35-39.279406.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.5021536694384112,\n \"acc_stderr\": 0.03408136345498624,\n \"acc_norm\": 0.5103315759128656,\n \"acc_norm_stderr\": 0.03501169960455142,\n \"mc1\": 0.24479804161566707,\n \"mc1_stderr\": 0.015051869486715004,\n \"mc2\": 0.36866124332509587,\n \"mc2_stderr\": 0.014406507957340794\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.39505119453924914,\n \"acc_stderr\": 0.014285898292938172,\n \"acc_norm\": 0.4061433447098976,\n \"acc_norm_stderr\": 0.01435165669009786\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.58105954989046,\n \"acc_stderr\": 0.004923772581848499,\n \"acc_norm\": 0.7671778530173272,\n \"acc_norm_stderr\": 0.004217661194937994\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.26,\n \"acc_stderr\": 0.04408440022768081,\n \"acc_norm\": 0.26,\n \"acc_norm_stderr\": 0.04408440022768081\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.4444444444444444,\n \"acc_stderr\": 0.04292596718256981,\n \"acc_norm\": 0.4444444444444444,\n \"acc_norm_stderr\": 0.04292596718256981\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.5,\n \"acc_stderr\": 0.04068942293855797,\n \"acc_norm\": 0.5,\n \"acc_norm_stderr\": 0.04068942293855797\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.51,\n \"acc_stderr\": 0.05024183937956912,\n \"acc_norm\": 0.51,\n \"acc_norm_stderr\": 0.05024183937956912\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.5547169811320755,\n \"acc_stderr\": 0.030588052974270655,\n \"acc_norm\": 0.5547169811320755,\n \"acc_norm_stderr\": 0.030588052974270655\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.5,\n \"acc_stderr\": 0.04181210050035455,\n \"acc_norm\": 0.5,\n \"acc_norm_stderr\": 0.04181210050035455\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.35,\n \"acc_stderr\": 0.04793724854411019,\n \"acc_norm\": 0.35,\n 
\"acc_norm_stderr\": 0.04793724854411019\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.42,\n \"acc_stderr\": 0.049604496374885836,\n \"acc_norm\": 0.42,\n \"acc_norm_stderr\": 0.049604496374885836\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.35,\n \"acc_stderr\": 0.0479372485441102,\n \"acc_norm\": 0.35,\n \"acc_norm_stderr\": 0.0479372485441102\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.4508670520231214,\n \"acc_stderr\": 0.0379401267469703,\n \"acc_norm\": 0.4508670520231214,\n \"acc_norm_stderr\": 0.0379401267469703\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.19607843137254902,\n \"acc_stderr\": 0.03950581861179962,\n \"acc_norm\": 0.19607843137254902,\n \"acc_norm_stderr\": 0.03950581861179962\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.63,\n \"acc_stderr\": 0.04852365870939099,\n \"acc_norm\": 0.63,\n \"acc_norm_stderr\": 0.04852365870939099\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.43829787234042555,\n \"acc_stderr\": 0.03243618636108101,\n \"acc_norm\": 0.43829787234042555,\n \"acc_norm_stderr\": 0.03243618636108101\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.3333333333333333,\n \"acc_stderr\": 0.04434600701584925,\n \"acc_norm\": 0.3333333333333333,\n \"acc_norm_stderr\": 0.04434600701584925\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.4896551724137931,\n \"acc_stderr\": 0.041657747757287644,\n \"acc_norm\": 0.4896551724137931,\n \"acc_norm_stderr\": 0.041657747757287644\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.3148148148148148,\n \"acc_stderr\": 0.023919984164047732,\n \"acc_norm\": 0.3148148148148148,\n \"acc_norm_stderr\": 0.023919984164047732\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.3253968253968254,\n \"acc_stderr\": 0.04190596438871136,\n \"acc_norm\": 0.3253968253968254,\n \"acc_norm_stderr\": 0.04190596438871136\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.34,\n \"acc_stderr\": 0.04760952285695235,\n \"acc_norm\": 0.34,\n \"acc_norm_stderr\": 0.04760952285695235\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.5838709677419355,\n \"acc_stderr\": 0.028040981380761547,\n \"acc_norm\": 0.5838709677419355,\n \"acc_norm_stderr\": 0.028040981380761547\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.41379310344827586,\n \"acc_stderr\": 0.03465304488406795,\n \"acc_norm\": 0.41379310344827586,\n \"acc_norm_stderr\": 0.03465304488406795\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.43,\n \"acc_stderr\": 0.04975698519562428,\n \"acc_norm\": 0.43,\n \"acc_norm_stderr\": 0.04975698519562428\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.6484848484848484,\n \"acc_stderr\": 0.037282069986826503,\n \"acc_norm\": 0.6484848484848484,\n \"acc_norm_stderr\": 0.037282069986826503\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.601010101010101,\n \"acc_stderr\": 0.03488901616852732,\n \"acc_norm\": 0.601010101010101,\n \"acc_norm_stderr\": 0.03488901616852732\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.7409326424870466,\n \"acc_stderr\": 0.03161877917935412,\n \"acc_norm\": 0.7409326424870466,\n \"acc_norm_stderr\": 0.03161877917935412\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.5025641025641026,\n 
\"acc_stderr\": 0.025350672979412195,\n \"acc_norm\": 0.5025641025641026,\n \"acc_norm_stderr\": 0.025350672979412195\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.2740740740740741,\n \"acc_stderr\": 0.027195934804085626,\n \"acc_norm\": 0.2740740740740741,\n \"acc_norm_stderr\": 0.027195934804085626\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.4789915966386555,\n \"acc_stderr\": 0.03244980849990029,\n \"acc_norm\": 0.4789915966386555,\n \"acc_norm_stderr\": 0.03244980849990029\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.3509933774834437,\n \"acc_stderr\": 0.03896981964257375,\n \"acc_norm\": 0.3509933774834437,\n \"acc_norm_stderr\": 0.03896981964257375\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.7064220183486238,\n \"acc_stderr\": 0.019525151122639667,\n \"acc_norm\": 0.7064220183486238,\n \"acc_norm_stderr\": 0.019525151122639667\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.3888888888888889,\n \"acc_stderr\": 0.03324708911809117,\n \"acc_norm\": 0.3888888888888889,\n \"acc_norm_stderr\": 0.03324708911809117\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.696078431372549,\n \"acc_stderr\": 0.032282103870378914,\n \"acc_norm\": 0.696078431372549,\n \"acc_norm_stderr\": 0.032282103870378914\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.70042194092827,\n \"acc_stderr\": 0.029818024749753095,\n \"acc_norm\": 0.70042194092827,\n \"acc_norm_stderr\": 0.029818024749753095\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.5874439461883408,\n \"acc_stderr\": 0.03304062175449297,\n \"acc_norm\": 0.5874439461883408,\n \"acc_norm_stderr\": 0.03304062175449297\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.6335877862595419,\n \"acc_stderr\": 0.04225875451969637,\n \"acc_norm\": 0.6335877862595419,\n \"acc_norm_stderr\": 0.04225875451969637\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.5950413223140496,\n \"acc_stderr\": 0.04481137755942469,\n \"acc_norm\": 0.5950413223140496,\n \"acc_norm_stderr\": 0.04481137755942469\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.5462962962962963,\n \"acc_stderr\": 0.04812917324536823,\n \"acc_norm\": 0.5462962962962963,\n \"acc_norm_stderr\": 0.04812917324536823\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.50920245398773,\n \"acc_stderr\": 0.03927705600787443,\n \"acc_norm\": 0.50920245398773,\n \"acc_norm_stderr\": 0.03927705600787443\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.38392857142857145,\n \"acc_stderr\": 0.04616143075028547,\n \"acc_norm\": 0.38392857142857145,\n \"acc_norm_stderr\": 0.04616143075028547\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.6699029126213593,\n \"acc_stderr\": 0.0465614711001235,\n \"acc_norm\": 0.6699029126213593,\n \"acc_norm_stderr\": 0.0465614711001235\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.782051282051282,\n \"acc_stderr\": 0.027046857630716677,\n \"acc_norm\": 0.782051282051282,\n \"acc_norm_stderr\": 0.027046857630716677\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.55,\n \"acc_stderr\": 0.05,\n \"acc_norm\": 0.55,\n \"acc_norm_stderr\": 0.05\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.6641123882503193,\n \"acc_stderr\": 0.016889407235171686,\n \"acc_norm\": 0.6641123882503193,\n \"acc_norm_stderr\": 0.016889407235171686\n 
},\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.5664739884393064,\n \"acc_stderr\": 0.026680134761679217,\n \"acc_norm\": 0.5664739884393064,\n \"acc_norm_stderr\": 0.026680134761679217\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.26256983240223464,\n \"acc_stderr\": 0.014716824273017761,\n \"acc_norm\": 0.26256983240223464,\n \"acc_norm_stderr\": 0.014716824273017761\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.565359477124183,\n \"acc_stderr\": 0.028384256704883037,\n \"acc_norm\": 0.565359477124183,\n \"acc_norm_stderr\": 0.028384256704883037\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.6045016077170418,\n \"acc_stderr\": 0.027770918531427838,\n \"acc_norm\": 0.6045016077170418,\n \"acc_norm_stderr\": 0.027770918531427838\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.5648148148148148,\n \"acc_stderr\": 0.027586006221607708,\n \"acc_norm\": 0.5648148148148148,\n \"acc_norm_stderr\": 0.027586006221607708\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.3723404255319149,\n \"acc_stderr\": 0.028838921471251458,\n \"acc_norm\": 0.3723404255319149,\n \"acc_norm_stderr\": 0.028838921471251458\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.3728813559322034,\n \"acc_stderr\": 0.01235063005833336,\n \"acc_norm\": 0.3728813559322034,\n \"acc_norm_stderr\": 0.01235063005833336\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.5183823529411765,\n \"acc_stderr\": 0.030352303395351964,\n \"acc_norm\": 0.5183823529411765,\n \"acc_norm_stderr\": 0.030352303395351964\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.4869281045751634,\n \"acc_stderr\": 0.020220920829626916,\n \"acc_norm\": 0.4869281045751634,\n \"acc_norm_stderr\": 0.020220920829626916\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6,\n \"acc_stderr\": 0.0469237132203465,\n \"acc_norm\": 0.6,\n \"acc_norm_stderr\": 0.0469237132203465\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.6285714285714286,\n \"acc_stderr\": 0.03093285879278984,\n \"acc_norm\": 0.6285714285714286,\n \"acc_norm_stderr\": 0.03093285879278984\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.681592039800995,\n \"acc_stderr\": 0.03294118479054095,\n \"acc_norm\": 0.681592039800995,\n \"acc_norm_stderr\": 0.03294118479054095\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.72,\n \"acc_stderr\": 0.045126085985421276,\n \"acc_norm\": 0.72,\n \"acc_norm_stderr\": 0.045126085985421276\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.39759036144578314,\n \"acc_stderr\": 0.038099730845402184,\n \"acc_norm\": 0.39759036144578314,\n \"acc_norm_stderr\": 0.038099730845402184\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.7485380116959064,\n \"acc_stderr\": 0.033275044238468436,\n \"acc_norm\": 0.7485380116959064,\n \"acc_norm_stderr\": 0.033275044238468436\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.24479804161566707,\n \"mc1_stderr\": 0.015051869486715004,\n \"mc2\": 0.36866124332509587,\n \"mc2_stderr\": 0.014406507957340794\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7190213101815311,\n \"acc_stderr\": 0.012632541095875824\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.0,\n \"acc_stderr\": 0.0\n }\n}\n```", "repo_url": "https://huggingface.co/luffycodes/vicuna-class-shishya-7b-ep3", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": 
"[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_12_16T14_35_39.279406", "path": ["**/details_harness|arc:challenge|25_2023-12-16T14-35-39.279406.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-12-16T14-35-39.279406.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_12_16T14_35_39.279406", "path": ["**/details_harness|gsm8k|5_2023-12-16T14-35-39.279406.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-12-16T14-35-39.279406.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_12_16T14_35_39.279406", "path": ["**/details_harness|hellaswag|10_2023-12-16T14-35-39.279406.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-12-16T14-35-39.279406.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_12_16T14_35_39.279406", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T14-35-39.279406.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-12-16T14-35-39.279406.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-12-16T14-35-39.279406.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T14-35-39.279406.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T14-35-39.279406.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-12-16T14-35-39.279406.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T14-35-39.279406.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T14-35-39.279406.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T14-35-39.279406.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T14-35-39.279406.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-12-16T14-35-39.279406.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-12-16T14-35-39.279406.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T14-35-39.279406.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-12-16T14-35-39.279406.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T14-35-39.279406.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T14-35-39.279406.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T14-35-39.279406.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-12-16T14-35-39.279406.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T14-35-39.279406.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T14-35-39.279406.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T14-35-39.279406.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T14-35-39.279406.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T14-35-39.279406.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T14-35-39.279406.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T14-35-39.279406.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T14-35-39.279406.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T14-35-39.279406.parquet", 
"**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T14-35-39.279406.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T14-35-39.279406.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T14-35-39.279406.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T14-35-39.279406.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T14-35-39.279406.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-12-16T14-35-39.279406.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T14-35-39.279406.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-12-16T14-35-39.279406.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T14-35-39.279406.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T14-35-39.279406.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T14-35-39.279406.parquet", "**/details_harness|hendrycksTest-management|5_2023-12-16T14-35-39.279406.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-12-16T14-35-39.279406.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T14-35-39.279406.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T14-35-39.279406.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T14-35-39.279406.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T14-35-39.279406.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-12-16T14-35-39.279406.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-12-16T14-35-39.279406.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-12-16T14-35-39.279406.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T14-35-39.279406.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-12-16T14-35-39.279406.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T14-35-39.279406.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T14-35-39.279406.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-12-16T14-35-39.279406.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-12-16T14-35-39.279406.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-12-16T14-35-39.279406.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T14-35-39.279406.parquet", "**/details_harness|hendrycksTest-virology|5_2023-12-16T14-35-39.279406.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-12-16T14-35-39.279406.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T14-35-39.279406.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-12-16T14-35-39.279406.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-12-16T14-35-39.279406.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T14-35-39.279406.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T14-35-39.279406.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-12-16T14-35-39.279406.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T14-35-39.279406.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T14-35-39.279406.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T14-35-39.279406.parquet", 
"**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T14-35-39.279406.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-12-16T14-35-39.279406.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-12-16T14-35-39.279406.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T14-35-39.279406.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-12-16T14-35-39.279406.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T14-35-39.279406.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T14-35-39.279406.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T14-35-39.279406.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-12-16T14-35-39.279406.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T14-35-39.279406.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T14-35-39.279406.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T14-35-39.279406.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T14-35-39.279406.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T14-35-39.279406.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T14-35-39.279406.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T14-35-39.279406.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T14-35-39.279406.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T14-35-39.279406.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T14-35-39.279406.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T14-35-39.279406.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T14-35-39.279406.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T14-35-39.279406.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T14-35-39.279406.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-12-16T14-35-39.279406.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T14-35-39.279406.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-12-16T14-35-39.279406.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T14-35-39.279406.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T14-35-39.279406.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T14-35-39.279406.parquet", "**/details_harness|hendrycksTest-management|5_2023-12-16T14-35-39.279406.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-12-16T14-35-39.279406.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T14-35-39.279406.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T14-35-39.279406.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T14-35-39.279406.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T14-35-39.279406.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-12-16T14-35-39.279406.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-12-16T14-35-39.279406.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-12-16T14-35-39.279406.parquet", 
"**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T14-35-39.279406.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-12-16T14-35-39.279406.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T14-35-39.279406.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T14-35-39.279406.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-12-16T14-35-39.279406.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-12-16T14-35-39.279406.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-12-16T14-35-39.279406.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T14-35-39.279406.parquet", "**/details_harness|hendrycksTest-virology|5_2023-12-16T14-35-39.279406.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-12-16T14-35-39.279406.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_12_16T14_35_39.279406", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T14-35-39.279406.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T14-35-39.279406.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_12_16T14_35_39.279406", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-12-16T14-35-39.279406.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-12-16T14-35-39.279406.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_12_16T14_35_39.279406", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-12-16T14-35-39.279406.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-12-16T14-35-39.279406.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_12_16T14_35_39.279406", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T14-35-39.279406.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T14-35-39.279406.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_12_16T14_35_39.279406", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T14-35-39.279406.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T14-35-39.279406.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_12_16T14_35_39.279406", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-12-16T14-35-39.279406.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-12-16T14-35-39.279406.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_12_16T14_35_39.279406", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T14-35-39.279406.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T14-35-39.279406.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_12_16T14_35_39.279406", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T14-35-39.279406.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T14-35-39.279406.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_12_16T14_35_39.279406", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T14-35-39.279406.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T14-35-39.279406.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_12_16T14_35_39.279406", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T14-35-39.279406.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T14-35-39.279406.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_12_16T14_35_39.279406", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-12-16T14-35-39.279406.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-12-16T14-35-39.279406.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_12_16T14_35_39.279406", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-12-16T14-35-39.279406.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-12-16T14-35-39.279406.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_12_16T14_35_39.279406", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T14-35-39.279406.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T14-35-39.279406.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_12_16T14_35_39.279406", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-12-16T14-35-39.279406.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-12-16T14-35-39.279406.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_12_16T14_35_39.279406", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T14-35-39.279406.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T14-35-39.279406.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_12_16T14_35_39.279406", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T14-35-39.279406.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T14-35-39.279406.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_12_16T14_35_39.279406", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T14-35-39.279406.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T14-35-39.279406.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_12_16T14_35_39.279406", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-12-16T14-35-39.279406.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-12-16T14-35-39.279406.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_12_16T14_35_39.279406", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T14-35-39.279406.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T14-35-39.279406.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_12_16T14_35_39.279406", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T14-35-39.279406.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T14-35-39.279406.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_12_16T14_35_39.279406", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T14-35-39.279406.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T14-35-39.279406.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_12_16T14_35_39.279406", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T14-35-39.279406.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T14-35-39.279406.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_12_16T14_35_39.279406", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T14-35-39.279406.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T14-35-39.279406.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_12_16T14_35_39.279406", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T14-35-39.279406.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T14-35-39.279406.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_12_16T14_35_39.279406", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T14-35-39.279406.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T14-35-39.279406.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_12_16T14_35_39.279406", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T14-35-39.279406.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T14-35-39.279406.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_12_16T14_35_39.279406", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T14-35-39.279406.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T14-35-39.279406.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_12_16T14_35_39.279406", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T14-35-39.279406.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T14-35-39.279406.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_12_16T14_35_39.279406", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T14-35-39.279406.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T14-35-39.279406.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_12_16T14_35_39.279406", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T14-35-39.279406.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T14-35-39.279406.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_12_16T14_35_39.279406", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T14-35-39.279406.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T14-35-39.279406.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_12_16T14_35_39.279406", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T14-35-39.279406.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T14-35-39.279406.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_12_16T14_35_39.279406", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-12-16T14-35-39.279406.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-12-16T14-35-39.279406.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_12_16T14_35_39.279406", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T14-35-39.279406.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T14-35-39.279406.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_12_16T14_35_39.279406", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-12-16T14-35-39.279406.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-12-16T14-35-39.279406.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_12_16T14_35_39.279406", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T14-35-39.279406.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T14-35-39.279406.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_12_16T14_35_39.279406", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T14-35-39.279406.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T14-35-39.279406.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_12_16T14_35_39.279406", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T14-35-39.279406.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T14-35-39.279406.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_12_16T14_35_39.279406", "path": ["**/details_harness|hendrycksTest-management|5_2023-12-16T14-35-39.279406.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-12-16T14-35-39.279406.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_12_16T14_35_39.279406", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-12-16T14-35-39.279406.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-12-16T14-35-39.279406.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_12_16T14_35_39.279406", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T14-35-39.279406.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T14-35-39.279406.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_12_16T14_35_39.279406", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T14-35-39.279406.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T14-35-39.279406.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_12_16T14_35_39.279406", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T14-35-39.279406.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T14-35-39.279406.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_12_16T14_35_39.279406", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T14-35-39.279406.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T14-35-39.279406.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_12_16T14_35_39.279406", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-12-16T14-35-39.279406.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-12-16T14-35-39.279406.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_12_16T14_35_39.279406", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-12-16T14-35-39.279406.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-12-16T14-35-39.279406.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_12_16T14_35_39.279406", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-12-16T14-35-39.279406.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-12-16T14-35-39.279406.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_12_16T14_35_39.279406", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T14-35-39.279406.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T14-35-39.279406.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_12_16T14_35_39.279406", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-12-16T14-35-39.279406.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2023-12-16T14-35-39.279406.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_12_16T14_35_39.279406", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T14-35-39.279406.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T14-35-39.279406.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_12_16T14_35_39.279406", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T14-35-39.279406.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T14-35-39.279406.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_12_16T14_35_39.279406", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-12-16T14-35-39.279406.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-12-16T14-35-39.279406.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_12_16T14_35_39.279406", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-12-16T14-35-39.279406.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-12-16T14-35-39.279406.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_12_16T14_35_39.279406", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-12-16T14-35-39.279406.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-12-16T14-35-39.279406.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_12_16T14_35_39.279406", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T14-35-39.279406.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T14-35-39.279406.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_12_16T14_35_39.279406", "path": ["**/details_harness|hendrycksTest-virology|5_2023-12-16T14-35-39.279406.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-12-16T14-35-39.279406.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_12_16T14_35_39.279406", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-12-16T14-35-39.279406.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-12-16T14-35-39.279406.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_12_16T14_35_39.279406", "path": ["**/details_harness|truthfulqa:mc|0_2023-12-16T14-35-39.279406.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-12-16T14-35-39.279406.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_12_16T14_35_39.279406", "path": ["**/details_harness|winogrande|5_2023-12-16T14-35-39.279406.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-12-16T14-35-39.279406.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_12_16T14_35_39.279406", "path": ["results_2023-12-16T14-35-39.279406.parquet"]}, {"split": "latest", "path": 
["results_2023-12-16T14-35-39.279406.parquet"]}]}]}
2023-12-16T14:39:23+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of luffycodes/vicuna-class-shishya-7b-ep3 Dataset automatically created during the evaluation run of model luffycodes/vicuna-class-shishya-7b-ep3 on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-12-16T14:35:39.279406 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
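The load snippet referenced in the card above (reproduced from the dataset metadata; `harness_winogrande_5` is just one of the 63 available configs) looks like this:

```python
from datasets import load_dataset

# Load the per-sample details for one task config; "train" points to the latest run.
data = load_dataset(
    "open-llm-leaderboard/details_luffycodes__vicuna-class-shishya-7b-ep3",
    "harness_winogrande_5",
    split="train",
)
```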
[ "# Dataset Card for Evaluation run of luffycodes/vicuna-class-shishya-7b-ep3\n\n\n\nDataset automatically created during the evaluation run of model luffycodes/vicuna-class-shishya-7b-ep3 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-12-16T14:35:39.279406(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of luffycodes/vicuna-class-shishya-7b-ep3\n\n\n\nDataset automatically created during the evaluation run of model luffycodes/vicuna-class-shishya-7b-ep3 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-12-16T14:35:39.279406(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 195, 66, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of luffycodes/vicuna-class-shishya-7b-ep3\n\n\n\nDataset automatically created during the evaluation run of model luffycodes/vicuna-class-shishya-7b-ep3 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-12-16T14:35:39.279406(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]## Dataset Card Authors [optional]" ]
6f940dea5928f0872316dc9887c4b05431b4380e
# Dataset Card for DS Coder Instruct Dataset

<!-- Provide a quick summary of the dataset. -->

DS Coder is a dataset for instruction fine-tuning of language models. It is a specialized dataset focusing only on data science (e.g. plotting, data wrangling, machine learning models, deep learning, and numerical computations). The dataset contains code examples in both R and Python. The goal of this dataset is to enable the creation of small-scale, specialized language model assistants for data science projects.

## Dataset Details

### Dataset Description

<!-- Provide a longer summary of what this dataset is. -->

The DS Coder instruct dataset contains *(input, instruction, output)* triplets. The instruction provides a task in the data science domain and the output contains the code to solve the task. Where available, it also contains a *text* field holding Alpaca-style input. Metadata, such as the programming language *(lang)* and topics *(topics)*, is provided. *topics* lists the concepts used in the code (e.g. ML, neural networks, plotting, etc.). This is determined based on which kinds of libraries the code uses. This field can be used to obtain a subset of the data for specific tasks, such as data visualization.

Additionally, the original data source is provided under the *dataset* field.

### Dataset Sources

<!-- Provide the basic links for the dataset. -->

DS Coder is filtered and preprocessed from a collection of publicly available datasets on HuggingFace. All the sources are listed below with their corresponding links.

- **nickrosh/Evol-Instruct-Code-80k-v1:** https://huggingface.co/datasets/nickrosh/Evol-Instruct-Code-80k-v1
- **TokenBender/code_instructions_122k_alpaca_style:** https://huggingface.co/datasets/TokenBender/code_instructions_122k_alpaca_style
- **theblackcat102/evol-codealpaca-v1:** https://huggingface.co/datasets/theblackcat102/evol-codealpaca-v1
- **ise-uiuc/Magicoder-OSS-Instruct-75K:** https://huggingface.co/datasets/ise-uiuc/Magicoder-OSS-Instruct-75K

Please make sure to cite the above-mentioned sources when using this dataset. You should visit these pages and look for specific usage instructions, if any.

## Dataset Creation

<!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). -->

DS Coder was created by filtering and processing existing public datasets of *(instruction, code)* pairs. Source data was filtered to keep only code related to data science applications. The filtering was done using regex to gather code that uses popular data science libraries (e.g. Matplotlib, Sklearn, PyTorch, etc.) in Python and R. Then, the data is further processed to filter out samples with very long or very short code. Code outputs with lots of comments and a low amount of code were filtered out. Additionally, samples with very long and very short instructions were also removed.

After filtering, exact deduplication based on output code and input instruction was performed. After this process, roughly *16K* samples remain.

A more detailed description of the dataset processing is provided below.

### Filtering

The first step of the filtering process is to gather all samples from source datasets that have code related to a data science application. To do so, regex filtering was applied to the *code* and *instruction* to filter out such samples. Regex filters mainly look for imports and usage of popular data science libraries, such as Pandas or PyTorch. Data science code in Python as well as R is gathered.
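A minimal sketch of this library-based gathering step is shown below. The patterns and the helper name are illustrative; the actual implementation lives in the Ea0011/wrangler repository and may differ.

```python
import re

# Illustrative patterns for popular data science libraries in Python and R.
PY_DS_LIBS = re.compile(r"\b(pandas|numpy|matplotlib|seaborn|sklearn|scipy|torch|tensorflow|keras)\b")
R_DS_LIBS = re.compile(r"\b(ggplot2|dplyr|tidyr|caret|data\.table|randomForest)\b", re.IGNORECASE)

def is_data_science_sample(instruction: str, code: str) -> bool:
    """Keep a sample only if its instruction or code mentions a common DS library."""
    text = f"{instruction}\n{code}"
    return bool(PY_DS_LIBS.search(text) or R_DS_LIBS.search(text))

# Example usage on a single (instruction, code) pair:
print(is_data_science_sample("Plot a histogram of ages", "import matplotlib.pyplot as plt"))  # True
```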
After gathering relevant code samples, further filtering based on line length, instruction length, alphanumeric ratio, and comment-to-code ratio is performed. Code filtering is similar to [BigCode](https://github.com/bigcode-project/bigcode-dataset); the code filtering parameters shown below are derived from there. This stage ensures that short, very long, and uninformative samples are removed. The script for filtering can be found in the repo [Ea0011/wrangler](https://github.com/Ea0011/wrangler). You may use the filtering script to process additional datasets or tweak the parameters.

Parameters for filtering are listed below:

- **line_max**: Maximum line length allowed is 1000 characters.
- **line_mean**: Maximum mean line length allowed is 100 characters.
- **alpha_frac**: Minimum fraction of alphanumeric characters allowed is 25%.
- **min_inst_size**: Minimum instruction size is 5 words.
- **max_inst_size**: Maximum instruction size is 1000 words.
- **max_threshold_comments**: Maximum comment-to-code ratio is 80%.
- **min_threshold_comments**: Minimum comment-to-code ratio is 1%.

## Data Analysis

This section provides some analysis of the dataset. Code lengths, language distribution, and the distribution of data science tasks are shown. The topic distribution shows the distribution of concepts used in the code. Some domains, such as plotting, are underrepresented compared to others. You may use the topics column to select samples for specific tasks.

<img src="lang_dist.png" width="60%"/> <img src="ds_dist.png" width="60%" />
<img src="inst_len_total.png" width="60%"/> <img src="topics.png" width="60%" />

As there are data points from several data sources, it is also worth showing distributions across samples from different datasets. As can be seen, some sources contain short and concise samples while others contain verbose samples. Use this information to choose a specific data source if needed.

<img src="code_len.png" width="60%"/> <img src="inst_len.png" width="60%" />

## Dataset Card Contact

For any suggestions and concerns, please reach out to me: [Ea0011](https://github.com/Ea0011/)
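Building on the card above, here is a minimal sketch of loading the dataset and selecting a topic-specific subset. The dataset id, config, and split come from the repository metadata; the exact values stored in the *lang* and *topics* fields should be checked against the data, so the filter below is illustrative.

```python
from datasets import load_dataset

# Load the DS Coder instruct dataset (default config, "train" split).
ds = load_dataset("ed001/ds-coder-instruct-v1", split="train")
print(ds)  # inspect the available columns (instruction, output, lang, topics, dataset, ...)

# Illustrative filter: keep Python samples whose topics mention plotting.
# The strings "python" and "plot" are assumptions -- check ds.unique("lang")
# and a few ds["topics"] values to see the actual labels.
viz = ds.filter(lambda ex: "python" in str(ex["lang"]).lower() and "plot" in str(ex["topics"]).lower())
print(len(viz))
```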
ed001/ds-coder-instruct-v1
[ "task_categories:text-generation", "task_categories:conversational", "task_categories:text2text-generation", "size_categories:10K<n<100K", "language:en", "license:cc-by-nc-sa-4.0", "code", "machine learning", "deep learning", "data science", "region:us" ]
2023-12-16T14:42:09+00:00
{"language": ["en"], "license": "cc-by-nc-sa-4.0", "size_categories": ["10K<n<100K"], "task_categories": ["text-generation", "conversational", "text2text-generation"], "pretty_name": "Data Science Coder", "tags": ["code", "machine learning", "deep learning", "data science"], "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "ds_coder.jsonl"}]}]}
2024-01-03T01:58:20+00:00
[]
[ "en" ]
TAGS #task_categories-text-generation #task_categories-conversational #task_categories-text2text-generation #size_categories-10K<n<100K #language-English #license-cc-by-nc-sa-4.0 #code #machine learning #deep learning #data science #region-us
# Dataset Card for DS Coder Instruct Dataset

DS Coder is a dataset for instruction fine-tuning of language models. It is a specialized dataset focusing only on data science (e.g. plotting, data wrangling, machine learning models, deep learning, and numerical computations). The dataset contains code examples in both R and Python. The goal of this dataset is to enable the creation of small-scale, specialized language model assistants for data science projects.

## Dataset Details

### Dataset Description

The DS Coder instruct dataset contains *(input, instruction, output)* triplets. The instruction provides a task in the data science domain and the output contains the code to solve the task. Where available, it also contains a *text* field holding Alpaca-style input. Metadata, such as the programming language *(lang)* and topics *(topics)*, are provided. *topics* lists the concepts used in the code (e.g. ML, neural networks, plotting, etc.). This is determined based on which kinds of libraries the code uses. This field can be used to obtain subsets of data for specific tasks, such as data visualization (a hypothetical loading sketch is included at the end of this card).

Additionally, the original data source is provided under the *dataset* field.

### Dataset Sources

DS Coder is filtered and preprocessed from a collection of publicly available datasets on HuggingFace. All the sources are listed below with their corresponding links.

- nickrosh/Evol-Instruct-Code-80k-v1: URL
- TokenBender/code_instructions_122k_alpaca_style: URL
- theblackcat102/evol-codealpaca-v1: URL
- ise-uiuc/Magicoder-OSS-Instruct-75K: URL

Please make sure to cite the above-mentioned sources when using this dataset. You should visit these pages and look for specific usage instructions, if any.

## Dataset Creation

DS Coder was created by filtering and processing existing public datasets of *(instruction, code)* pairs. Source data was filtered to keep only code related to data science applications. The filtering was done using regex to gather code that uses popular data science libraries (e.g. Matplotlib, Sklearn, PyTorch, etc.) in Python and R. Then, the data is further processed to filter out samples with very long or very short code. Code outputs with lots of comments and a low amount of code were filtered out. Additionally, samples with very long or very short instructions were also removed.

After filtering, exact deduplication based on output code and input instruction was performed. After this process, roughly *16K* samples remain.

A more detailed description of dataset processing is provided below.

### Filtering

The first step of the filtering process is to gather all samples from source datasets that have code related to a data science application. To do so, regex filtering was applied to the *code* and *instruction* to select such samples. Regex filters mainly look for imports and usage of popular data science libraries, such as Pandas or PyTorch. Data science code in Python as well as R is gathered.

After gathering relevant code samples, further filtering based on line length, instruction length, alphanumeric ratio, and comment-to-code ratio is performed. Code filtering is similar to BigCode, and the code filtering parameters shown below are derived from there. This stage ensures that short, very long, and uninformative samples are removed (a minimal filtering sketch is included at the end of this card). The script for filtering can be found in the repo Ea0011/wrangler. You may use the filtering script to process additional datasets or tweak the params.

Parameters for filtering are listed below:

- line_max: Maximum line length allowed is 1000 characters.
- line_mean: Maximum mean line length allowed is 100 characters.
- alpha_frac: Minimum fraction of alphanumeric characters allowed is 25%.
- min_inst_size: Minimum instruction size in words is 5 words.
- max_inst_size: Maximum instruction size in words is 1000 words.
- max_threshold_comments: Maximum threshold for comment-to-code ratio is 80%.
- min_threshold_comments: Minimum threshold for comment-to-code ratio is 1%.

## Data Analysis

This section provides some analysis of the dataset. Code lengths, language distribution, and the distribution of data science tasks are shown. The topic distribution shows the distribution of concepts used in the code. Some domains, such as plotting, are underrepresented compared to others. You may use the topics column to select samples for specific tasks.

<img src="lang_dist.png" width="60%"/> <img src="ds_dist.png" width="60%" /> <img src="inst_len_total.png" width="60%"/> <img src="URL" width="60%" />

As there are data points from several data sources, it is also worth showing distributions across samples from different datasets. As can be seen, some sources contain short and concise samples while others contain verbose samples. Use this information to choose a specific data source if needed.

<img src="code_len.png" width="60%"/> <img src="inst_len.png" width="60%" />

## Dataset Card Contact

For any suggestions and concerns please reach out to me: Ea0011
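As an illustration of how the *lang* and *topics* fields can be used to select a subset, here is a minimal, hypothetical loading sketch. The repository id is a placeholder, and the comparisons assume *lang* is a language name and *topics* is a list (or string) of topic names; adjust them to the actual schema.

```python
from datasets import load_dataset

# Placeholder repository id; substitute the actual DS Coder repo id.
ds = load_dataset("<ds-coder-repo-id>", split="train")

# Keep Python samples whose topics mention plotting (this works whether
# `topics` is a list of strings or a single string).
plotting_py = ds.filter(
    lambda row: row["lang"].lower() == "python" and "plotting" in row["topics"]
)
print(f"{len(plotting_py)} plotting samples in Python")
```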
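The filtering stage described above can be pictured with the following sketch. It is not the actual Ea0011/wrangler script: the library list, the comment-detection heuristic, and the way the ratios are computed are assumptions; only the numeric thresholds come from the parameter list above.

```python
import re

# Illustrative list of popular data-science libraries (not the exact list used).
DS_LIBS = r"(pandas|numpy|sklearn|matplotlib|seaborn|torch|tensorflow|keras|ggplot2|dplyr|caret)"
DS_USAGE_RE = re.compile(rf"(import|from|library\(|require\()\s*\S*{DS_LIBS}", re.IGNORECASE)


def looks_like_data_science(code: str) -> bool:
    """Heuristic regex check: does the code import or load a popular data-science library?"""
    return bool(DS_USAGE_RE.search(code))


def passes_quality_filters(code: str, instruction: str) -> bool:
    """Apply length, alphanumeric-ratio, and comment-ratio filters with the thresholds listed above."""
    lines = code.splitlines() or [""]
    line_max = max(len(line) for line in lines)
    line_mean = sum(len(line) for line in lines) / len(lines)
    alpha_frac = sum(ch.isalnum() for ch in code) / max(len(code), 1)
    # Assumed heuristic: a "comment line" starts with a Python or R/C-style comment marker.
    comment_ratio = sum(line.lstrip().startswith(("#", "//")) for line in lines) / len(lines)
    inst_words = len(instruction.split())
    return (
        line_max <= 1000
        and line_mean <= 100
        and alpha_frac >= 0.25
        and 5 <= inst_words <= 1000
        and 0.01 <= comment_ratio <= 0.80
    )
```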
[ "# Dataset Card for DS Coder Instruct Dataset\n\n\n\nDS Coder is a dataset for instruction fine tuning of language models. It is a specialized dataset focusing only on\ndata science (eg. plotting, data wrangling, machine learnig models, deep learning, and numerical computations). The dataset contains code examples both in R and Python.\nThe goal of this dataset is to enable creation of small-scale, specialized language model assistants for data science projects.", "## Dataset Details", "### Dataset Description\n\n\nDS Coder instruct dataset contains *(input, instruction, output)* triplets. Instruction provides a task in the data science domain and output contains the code to solve the task.\nWhere available, it also contains *text* field holding Alpaca style input. Metadata, such as the programming language *(lang)* and topics *(topics)* are provided.\n*topics* lists the concepts used in the code (eg. ML, neural networs, plotting, etc.). This is determined based on which kinds of libraries the code uses. This field can be used\nto obtain subset of data for specific tasks, such as data vizualisation. \n\nAdditionally, the original data source is provided under the *dataset* field.", "### Dataset Sources\n\n\nDS Coder is filtered and preprocessed from a collection of publically available datasets on HuggingFace. All the sources all liste below with their corresponding links.\n\n- nickrosh/Evol-Instruct-Code-80k-v1: URL\n- TokenBender/code_instructions_122k_alpaca_style: URL\n- theblackcat102/evol-codealpaca-v1: URL\n- ise-uiuc/Magicoder-OSS-Instruct-75K: URL\n\nPlease make sure to cite the above mentioned source when using this dataset. You should visit these pages and look for specific usage instructions, if any.", "## Dataset Creation\n\n\n\nDS Coder was created by filtering and processing existing public datasets of *(instruction, code)* pairs. Source data was filtered to keep only code related to data science\napplications. The filtering was done using regex to gather code that uses popular data science libraries (eg. Matplotlib, Sklearn, PyTorch, etc.) in Python and R.\nThen, the data is further processed to filter out samples with very long or very short code. Code outputs with lots of comments and low amound of code were filtered out.\nAdditionally, samples with very long and very short instructions were also removed. \n\nAfter filtering, exact deduplication based on output code and input instruction was performed. After this process, roughly *16K* samples remain.\n\nMore specific description dataset processing is provided below.", "### Filtering\n\nThe first step of the filtering process is to gather all samples from source datasets that have code related to a data science application. To do so, regex filtering was\napplied to the *code* and *instruction* to filter out such samples. Regex filters mainly look for imports and usage of popular data science libraries, such as Pandas or PyTorch.\nData science code in Python as well as R are gathered. \n\nAfter gathering relevant code samples, further filtering based on line length, instruction length, alphanumeric ratio, and comment to code ratio are performed.\nCode filtering is similar to BigCode. Code filtering parameters shown below are derived from there.\nThis stage ensures that short, very, long and uninformative samples are removed. The script for filtering can be found in this repo\nEa0011/wrangler. 
You may use the filtering script to process additional datasets or tweak the params.\n\nParameters for filtering are listed below:\n\n- line_max: Maximum line length allowed is 1000 characters.\n \n- line_mean: Maximum mean line length allowed is 100 characters.\n\n- alpha_frac: Minimum fraction of alphanumeric characters allowed is 25%.\n\n- min_inst_size: Minimum instruction size in words is 5 words.\n\n- max_inst_size: Maximum instruction size in words is 1000 words.\n\n- max_threshold_comments: Maximum threshold for comment to code ratio is 80%.\n\n- min_threshold_comments: Minimum threshold for comment to code ratio is 1%.", "## Data Analysis\n\nThis section provides some analysis of the dataset. Code lengths, language distribution as well as distribution of data science tasks are shown. Topic distribution shows\ndistribution of concepts used in the code. Some domains, such as plotting are underrepresanted compared to others. You may use the topics column to select samples for specific tasks.\n\n<img src=\"lang_dist.png\" width=\"60%\"/>\n<img src=\"ds_dist.png\" width=\"60%\" />\n<img src=\"inst_len_total.png\" width=\"60%\"/>\n<img src=\"URL\" width=\"60%\" />\n\nAs there are data points from several data sources, it is also worth showing distributions across samples from different datasets. As it can be seen, some sources\ncontain short and concise samples while others contain verbose samples. Use this information to choose specific data source if needed.\n\n<img src=\"code_len.png\" width=\"60%\"/>\n<img src=\"inst_len.png\" width=\"60%\" />", "## Dataset Card Contact\n\nFor any suggestions and concerns please reach out to me: Ea0011" ]
[ "TAGS\n#task_categories-text-generation #task_categories-conversational #task_categories-text2text-generation #size_categories-10K<n<100K #language-English #license-cc-by-nc-sa-4.0 #code #machine learning #deep learning #data science #region-us \n", "# Dataset Card for DS Coder Instruct Dataset\n\n\n\nDS Coder is a dataset for instruction fine tuning of language models. It is a specialized dataset focusing only on\ndata science (eg. plotting, data wrangling, machine learnig models, deep learning, and numerical computations). The dataset contains code examples both in R and Python.\nThe goal of this dataset is to enable creation of small-scale, specialized language model assistants for data science projects.", "## Dataset Details", "### Dataset Description\n\n\nDS Coder instruct dataset contains *(input, instruction, output)* triplets. Instruction provides a task in the data science domain and output contains the code to solve the task.\nWhere available, it also contains *text* field holding Alpaca style input. Metadata, such as the programming language *(lang)* and topics *(topics)* are provided.\n*topics* lists the concepts used in the code (eg. ML, neural networs, plotting, etc.). This is determined based on which kinds of libraries the code uses. This field can be used\nto obtain subset of data for specific tasks, such as data vizualisation. \n\nAdditionally, the original data source is provided under the *dataset* field.", "### Dataset Sources\n\n\nDS Coder is filtered and preprocessed from a collection of publically available datasets on HuggingFace. All the sources all liste below with their corresponding links.\n\n- nickrosh/Evol-Instruct-Code-80k-v1: URL\n- TokenBender/code_instructions_122k_alpaca_style: URL\n- theblackcat102/evol-codealpaca-v1: URL\n- ise-uiuc/Magicoder-OSS-Instruct-75K: URL\n\nPlease make sure to cite the above mentioned source when using this dataset. You should visit these pages and look for specific usage instructions, if any.", "## Dataset Creation\n\n\n\nDS Coder was created by filtering and processing existing public datasets of *(instruction, code)* pairs. Source data was filtered to keep only code related to data science\napplications. The filtering was done using regex to gather code that uses popular data science libraries (eg. Matplotlib, Sklearn, PyTorch, etc.) in Python and R.\nThen, the data is further processed to filter out samples with very long or very short code. Code outputs with lots of comments and low amound of code were filtered out.\nAdditionally, samples with very long and very short instructions were also removed. \n\nAfter filtering, exact deduplication based on output code and input instruction was performed. After this process, roughly *16K* samples remain.\n\nMore specific description dataset processing is provided below.", "### Filtering\n\nThe first step of the filtering process is to gather all samples from source datasets that have code related to a data science application. To do so, regex filtering was\napplied to the *code* and *instruction* to filter out such samples. Regex filters mainly look for imports and usage of popular data science libraries, such as Pandas or PyTorch.\nData science code in Python as well as R are gathered. \n\nAfter gathering relevant code samples, further filtering based on line length, instruction length, alphanumeric ratio, and comment to code ratio are performed.\nCode filtering is similar to BigCode. 
Code filtering parameters shown below are derived from there.\nThis stage ensures that short, very, long and uninformative samples are removed. The script for filtering can be found in this repo\nEa0011/wrangler. You may use the filtering script to process additional datasets or tweak the params.\n\nParameters for filtering are listed below:\n\n- line_max: Maximum line length allowed is 1000 characters.\n \n- line_mean: Maximum mean line length allowed is 100 characters.\n\n- alpha_frac: Minimum fraction of alphanumeric characters allowed is 25%.\n\n- min_inst_size: Minimum instruction size in words is 5 words.\n\n- max_inst_size: Maximum instruction size in words is 1000 words.\n\n- max_threshold_comments: Maximum threshold for comment to code ratio is 80%.\n\n- min_threshold_comments: Minimum threshold for comment to code ratio is 1%.", "## Data Analysis\n\nThis section provides some analysis of the dataset. Code lengths, language distribution as well as distribution of data science tasks are shown. Topic distribution shows\ndistribution of concepts used in the code. Some domains, such as plotting are underrepresanted compared to others. You may use the topics column to select samples for specific tasks.\n\n<img src=\"lang_dist.png\" width=\"60%\"/>\n<img src=\"ds_dist.png\" width=\"60%\" />\n<img src=\"inst_len_total.png\" width=\"60%\"/>\n<img src=\"URL\" width=\"60%\" />\n\nAs there are data points from several data sources, it is also worth showing distributions across samples from different datasets. As it can be seen, some sources\ncontain short and concise samples while others contain verbose samples. Use this information to choose specific data source if needed.\n\n<img src=\"code_len.png\" width=\"60%\"/>\n<img src=\"inst_len.png\" width=\"60%\" />", "## Dataset Card Contact\n\nFor any suggestions and concerns please reach out to me: Ea0011" ]
[ 80, 109, 4, 181, 147, 193, 359, 253, 20 ]
[ "passage: TAGS\n#task_categories-text-generation #task_categories-conversational #task_categories-text2text-generation #size_categories-10K<n<100K #language-English #license-cc-by-nc-sa-4.0 #code #machine learning #deep learning #data science #region-us \n# Dataset Card for DS Coder Instruct Dataset\n\n\n\nDS Coder is a dataset for instruction fine tuning of language models. It is a specialized dataset focusing only on\ndata science (eg. plotting, data wrangling, machine learnig models, deep learning, and numerical computations). The dataset contains code examples both in R and Python.\nThe goal of this dataset is to enable creation of small-scale, specialized language model assistants for data science projects.## Dataset Details### Dataset Description\n\n\nDS Coder instruct dataset contains *(input, instruction, output)* triplets. Instruction provides a task in the data science domain and output contains the code to solve the task.\nWhere available, it also contains *text* field holding Alpaca style input. Metadata, such as the programming language *(lang)* and topics *(topics)* are provided.\n*topics* lists the concepts used in the code (eg. ML, neural networs, plotting, etc.). This is determined based on which kinds of libraries the code uses. This field can be used\nto obtain subset of data for specific tasks, such as data vizualisation. \n\nAdditionally, the original data source is provided under the *dataset* field.", "passage: ### Dataset Sources\n\n\nDS Coder is filtered and preprocessed from a collection of publically available datasets on HuggingFace. All the sources all liste below with their corresponding links.\n\n- nickrosh/Evol-Instruct-Code-80k-v1: URL\n- TokenBender/code_instructions_122k_alpaca_style: URL\n- theblackcat102/evol-codealpaca-v1: URL\n- ise-uiuc/Magicoder-OSS-Instruct-75K: URL\n\nPlease make sure to cite the above mentioned source when using this dataset. You should visit these pages and look for specific usage instructions, if any.## Dataset Creation\n\n\n\nDS Coder was created by filtering and processing existing public datasets of *(instruction, code)* pairs. Source data was filtered to keep only code related to data science\napplications. The filtering was done using regex to gather code that uses popular data science libraries (eg. Matplotlib, Sklearn, PyTorch, etc.) in Python and R.\nThen, the data is further processed to filter out samples with very long or very short code. Code outputs with lots of comments and low amound of code were filtered out.\nAdditionally, samples with very long and very short instructions were also removed. \n\nAfter filtering, exact deduplication based on output code and input instruction was performed. After this process, roughly *16K* samples remain.\n\nMore specific description dataset processing is provided below." ]
51e8d1f120fe3eba1ebf473548a0034569e5b1d7
# Dataset of akane (Blue Archive)

This is the dataset of akane (Blue Archive), containing 498 images and their tags.

Images are crawled from many sites (e.g. danbooru, pixiv, zerochan ...), the auto-crawling system is powered by [DeepGHS Team](https://github.com/deepghs)([huggingface organization](https://huggingface.co/deepghs)). This is a WebUI that contains crawlers and other things: ([LittleAppleWebUI](https://github.com/LittleApple-fp16/LittleAppleWebUI))

| Name            | Images | Download                                 | Description                                                                               |
|:----------------|-------:|:-----------------------------------------|:------------------------------------------------------------------------------------------|
| raw             |    498 | [Download](dataset-raw.zip)              | Raw data with meta information.                                                           |
| raw-stage3      |   1352 | [Download](dataset-raw-stage3.zip)       | 3-stage cropped raw data with meta information.                                           |
| raw-stage3-eyes |   1567 | [Download](dataset-raw-stage3-eyes.zip)  | 3-stage cropped (with eye-focus) raw data with meta information.                          |
| 384x512         |    498 | [Download](dataset-384x512.zip)          | 384x512 aligned dataset.                                                                  |
| 512x704         |    498 | [Download](dataset-512x704.zip)          | 512x704 aligned dataset.                                                                  |
| 640x880         |    498 | [Download](dataset-640x880.zip)          | 640x880 aligned dataset.                                                                  |
| stage3-640      |   1352 | [Download](dataset-stage3-640.zip)       | 3-stage cropped dataset with the shorter side not exceeding 640 pixels.                   |
| stage3-800      |   1352 | [Download](dataset-stage3-800.zip)       | 3-stage cropped dataset with the shorter side not exceeding 800 pixels.                   |
| stage3-p512-640 |   1256 | [Download](dataset-stage3-p512-640.zip)  | 3-stage cropped dataset with the area not less than 512x512 pixels.                       |
| stage3-eyes-640 |   1567 | [Download](dataset-stage3-eyes-640.zip)  | 3-stage cropped (with eye-focus) dataset with the shorter side not exceeding 640 pixels.  |
| stage3-eyes-800 |   1567 | [Download](dataset-stage3-eyes-800.zip)  | 3-stage cropped (with eye-focus) dataset with the shorter side not exceeding 800 pixels.  |
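A minimal sketch for fetching and unpacking one of the archives listed above with `huggingface_hub`, assuming the zip files are stored at the root of this dataset repository:

```python
from huggingface_hub import hf_hub_download
import zipfile

# Download the 384x512 aligned package from this dataset repo.
path = hf_hub_download(
    repo_id="AppleHarem/akane_bluearchive",
    filename="dataset-384x512.zip",
    repo_type="dataset",
)

# Extract the archive into a local folder.
with zipfile.ZipFile(path) as zf:
    zf.extractall("akane_384x512")
```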
AppleHarem/akane_bluearchive
[ "task_categories:text-to-image", "size_categories:n<1K", "license:mit", "art", "not-for-all-audiences", "region:us" ]
2023-12-16T14:44:23+00:00
{"license": "mit", "size_categories": ["n<1K"], "task_categories": ["text-to-image"], "tags": ["art", "not-for-all-audiences"]}
2023-12-16T14:46:11+00:00
[]
[]
TAGS #task_categories-text-to-image #size_categories-n<1K #license-mit #art #not-for-all-audiences #region-us
Dataset of akane (Blue Archive) =============================== This is the dataset of akane (Blue Archive), containing 498 images and their tags. Images are crawled from many sites (e.g. danbooru, pixiv, zerochan ...), the auto-crawling system is powered by DeepGHS Team(huggingface organization). This is a WebUI that contains crawlers and other things: (LittleAppleWebUI)
[]
[ "TAGS\n#task_categories-text-to-image #size_categories-n<1K #license-mit #art #not-for-all-audiences #region-us \n" ]
[ 44 ]
[ "passage: TAGS\n#task_categories-text-to-image #size_categories-n<1K #license-mit #art #not-for-all-audiences #region-us \n" ]
c11400307d931669155c18df7b4727263f9bd952
# Dataset Card for "skillate_helpdesk" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
bipulai/skillate_helpdesk
[ "region:us" ]
2023-12-16T14:45:19+00:00
{"dataset_info": {"features": [{"name": "question", "dtype": "string"}, {"name": "answer", "dtype": "string"}, {"name": "cleaned_question", "dtype": "string"}, {"name": "cleaned_answer", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 617367, "num_examples": 302}], "download_size": 236993, "dataset_size": 617367}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]}
2023-12-16T14:46:02+00:00
[]
[]
TAGS #region-us
# Dataset Card for "skillate_helpdesk" More Information needed
[ "# Dataset Card for \"skillate_helpdesk\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"skillate_helpdesk\"\n\nMore Information needed" ]
[ 6, 15 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"skillate_helpdesk\"\n\nMore Information needed" ]
6c2f81575cca265fa9d46dffb5a8d11e2b5f4986
# Dataset Card for Evaluation run of luffycodes/vicuna-class-shishya-all-hal-7b-ep3 <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [luffycodes/vicuna-class-shishya-all-hal-7b-ep3](https://huggingface.co/luffycodes/vicuna-class-shishya-all-hal-7b-ep3) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_luffycodes__vicuna-class-shishya-all-hal-7b-ep3", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-12-16T14:43:13.038199](https://huggingface.co/datasets/open-llm-leaderboard/details_luffycodes__vicuna-class-shishya-all-hal-7b-ep3/blob/main/results_2023-12-16T14-43-13.038199.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.5101563719470747, "acc_stderr": 0.034174127741758806, "acc_norm": 0.5187563327932377, "acc_norm_stderr": 0.035034297734345236, "mc1": 0.2962056303549572, "mc1_stderr": 0.015983595101811392, "mc2": 0.4483407078884665, "mc2_stderr": 0.015114510843263715 }, "harness|arc:challenge|25": { "acc": 0.42150170648464164, "acc_stderr": 0.014430197069326014, "acc_norm": 0.454778156996587, "acc_norm_stderr": 0.014551507060836355 }, "harness|hellaswag|10": { "acc": 0.5836486755626369, "acc_stderr": 0.004919457850104234, "acc_norm": 0.7720573590918144, "acc_norm_stderr": 0.004186480645315569 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.29, "acc_stderr": 0.04560480215720684, "acc_norm": 0.29, "acc_norm_stderr": 0.04560480215720684 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.4666666666666667, "acc_stderr": 0.043097329010363554, "acc_norm": 0.4666666666666667, "acc_norm_stderr": 0.043097329010363554 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.506578947368421, "acc_stderr": 0.040685900502249704, "acc_norm": 0.506578947368421, "acc_norm_stderr": 0.040685900502249704 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.55, "acc_stderr": 0.05, "acc_norm": 0.55, "acc_norm_stderr": 0.05 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.5547169811320755, "acc_stderr": 0.030588052974270655, "acc_norm": 0.5547169811320755, "acc_norm_stderr": 0.030588052974270655 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.5208333333333334, "acc_stderr": 0.041775789507399935, "acc_norm": 0.5208333333333334, "acc_norm_stderr": 0.041775789507399935 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.34, "acc_stderr": 0.04760952285695235, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695235 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.44, "acc_stderr": 0.04988876515698589, "acc_norm": 0.44, 
"acc_norm_stderr": 0.04988876515698589 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.37, "acc_stderr": 0.04852365870939099, "acc_norm": 0.37, "acc_norm_stderr": 0.04852365870939099 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.4393063583815029, "acc_stderr": 0.037842719328874674, "acc_norm": 0.4393063583815029, "acc_norm_stderr": 0.037842719328874674 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.20588235294117646, "acc_stderr": 0.040233822736177476, "acc_norm": 0.20588235294117646, "acc_norm_stderr": 0.040233822736177476 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.64, "acc_stderr": 0.04824181513244218, "acc_norm": 0.64, "acc_norm_stderr": 0.04824181513244218 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.451063829787234, "acc_stderr": 0.032529096196131965, "acc_norm": 0.451063829787234, "acc_norm_stderr": 0.032529096196131965 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.3333333333333333, "acc_stderr": 0.044346007015849245, "acc_norm": 0.3333333333333333, "acc_norm_stderr": 0.044346007015849245 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.47586206896551725, "acc_stderr": 0.041618085035015295, "acc_norm": 0.47586206896551725, "acc_norm_stderr": 0.041618085035015295 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.30158730158730157, "acc_stderr": 0.023636975996101803, "acc_norm": 0.30158730158730157, "acc_norm_stderr": 0.023636975996101803 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.3492063492063492, "acc_stderr": 0.04263906892795133, "acc_norm": 0.3492063492063492, "acc_norm_stderr": 0.04263906892795133 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.36, "acc_stderr": 0.048241815132442176, "acc_norm": 0.36, "acc_norm_stderr": 0.048241815132442176 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.5612903225806452, "acc_stderr": 0.02822949732031722, "acc_norm": 0.5612903225806452, "acc_norm_stderr": 0.02822949732031722 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.4039408866995074, "acc_stderr": 0.034524539038220406, "acc_norm": 0.4039408866995074, "acc_norm_stderr": 0.034524539038220406 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.42, "acc_stderr": 0.049604496374885836, "acc_norm": 0.42, "acc_norm_stderr": 0.049604496374885836 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.6303030303030303, "acc_stderr": 0.03769430314512568, "acc_norm": 0.6303030303030303, "acc_norm_stderr": 0.03769430314512568 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.5858585858585859, "acc_stderr": 0.03509438348879629, "acc_norm": 0.5858585858585859, "acc_norm_stderr": 0.03509438348879629 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.7564766839378239, "acc_stderr": 0.030975436386845436, "acc_norm": 0.7564766839378239, "acc_norm_stderr": 0.030975436386845436 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.5256410256410257, "acc_stderr": 0.025317649726448663, "acc_norm": 0.5256410256410257, "acc_norm_stderr": 0.025317649726448663 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.2740740740740741, "acc_stderr": 0.027195934804085626, "acc_norm": 0.2740740740740741, "acc_norm_stderr": 0.027195934804085626 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.47478991596638653, "acc_stderr": 0.0324371805513741, "acc_norm": 0.47478991596638653, "acc_norm_stderr": 0.0324371805513741 }, "harness|hendrycksTest-high_school_physics|5": { 
"acc": 0.3443708609271523, "acc_stderr": 0.038796870240733264, "acc_norm": 0.3443708609271523, "acc_norm_stderr": 0.038796870240733264 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.7027522935779816, "acc_stderr": 0.019595707224643523, "acc_norm": 0.7027522935779816, "acc_norm_stderr": 0.019595707224643523 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.4027777777777778, "acc_stderr": 0.03344887382997866, "acc_norm": 0.4027777777777778, "acc_norm_stderr": 0.03344887382997866 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.7107843137254902, "acc_stderr": 0.031822318676475544, "acc_norm": 0.7107843137254902, "acc_norm_stderr": 0.031822318676475544 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.7172995780590717, "acc_stderr": 0.029312814153955934, "acc_norm": 0.7172995780590717, "acc_norm_stderr": 0.029312814153955934 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.6143497757847534, "acc_stderr": 0.03266842214289202, "acc_norm": 0.6143497757847534, "acc_norm_stderr": 0.03266842214289202 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.648854961832061, "acc_stderr": 0.04186445163013751, "acc_norm": 0.648854961832061, "acc_norm_stderr": 0.04186445163013751 }, "harness|hendrycksTest-international_law|5": { "acc": 0.5867768595041323, "acc_stderr": 0.04495087843548408, "acc_norm": 0.5867768595041323, "acc_norm_stderr": 0.04495087843548408 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.5277777777777778, "acc_stderr": 0.048262172941398944, "acc_norm": 0.5277777777777778, "acc_norm_stderr": 0.048262172941398944 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.5337423312883436, "acc_stderr": 0.039194155450484096, "acc_norm": 0.5337423312883436, "acc_norm_stderr": 0.039194155450484096 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.41964285714285715, "acc_stderr": 0.04684099321077106, "acc_norm": 0.41964285714285715, "acc_norm_stderr": 0.04684099321077106 }, "harness|hendrycksTest-management|5": { "acc": 0.7087378640776699, "acc_stderr": 0.04498676320572924, "acc_norm": 0.7087378640776699, "acc_norm_stderr": 0.04498676320572924 }, "harness|hendrycksTest-marketing|5": { "acc": 0.7735042735042735, "acc_stderr": 0.02742100729539292, "acc_norm": 0.7735042735042735, "acc_norm_stderr": 0.02742100729539292 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.59, "acc_stderr": 0.049431107042371025, "acc_norm": 0.59, "acc_norm_stderr": 0.049431107042371025 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.6896551724137931, "acc_stderr": 0.016543785026048315, "acc_norm": 0.6896551724137931, "acc_norm_stderr": 0.016543785026048315 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.5606936416184971, "acc_stderr": 0.026720034380514995, "acc_norm": 0.5606936416184971, "acc_norm_stderr": 0.026720034380514995 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.3016759776536313, "acc_stderr": 0.015350767572220286, "acc_norm": 0.3016759776536313, "acc_norm_stderr": 0.015350767572220286 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.5882352941176471, "acc_stderr": 0.02818059632825929, "acc_norm": 0.5882352941176471, "acc_norm_stderr": 0.02818059632825929 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.6109324758842444, "acc_stderr": 0.027690337536485372, "acc_norm": 0.6109324758842444, "acc_norm_stderr": 0.027690337536485372 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.5740740740740741, "acc_stderr": 0.027513747284379424, "acc_norm": 0.5740740740740741, "acc_norm_stderr": 
0.027513747284379424 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.36524822695035464, "acc_stderr": 0.028723863853281285, "acc_norm": 0.36524822695035464, "acc_norm_stderr": 0.028723863853281285 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.37353324641460234, "acc_stderr": 0.012354994823515266, "acc_norm": 0.37353324641460234, "acc_norm_stderr": 0.012354994823515266 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.5183823529411765, "acc_stderr": 0.030352303395351964, "acc_norm": 0.5183823529411765, "acc_norm_stderr": 0.030352303395351964 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.49836601307189543, "acc_stderr": 0.020227726838150117, "acc_norm": 0.49836601307189543, "acc_norm_stderr": 0.020227726838150117 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6272727272727273, "acc_stderr": 0.04631381319425465, "acc_norm": 0.6272727272727273, "acc_norm_stderr": 0.04631381319425465 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.636734693877551, "acc_stderr": 0.03078905113903081, "acc_norm": 0.636734693877551, "acc_norm_stderr": 0.03078905113903081 }, "harness|hendrycksTest-sociology|5": { "acc": 0.681592039800995, "acc_stderr": 0.03294118479054095, "acc_norm": 0.681592039800995, "acc_norm_stderr": 0.03294118479054095 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.71, "acc_stderr": 0.045604802157206845, "acc_norm": 0.71, "acc_norm_stderr": 0.045604802157206845 }, "harness|hendrycksTest-virology|5": { "acc": 0.4036144578313253, "acc_stderr": 0.038194861407583984, "acc_norm": 0.4036144578313253, "acc_norm_stderr": 0.038194861407583984 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.7309941520467836, "acc_stderr": 0.03401052620104089, "acc_norm": 0.7309941520467836, "acc_norm_stderr": 0.03401052620104089 }, "harness|truthfulqa:mc|0": { "mc1": 0.2962056303549572, "mc1_stderr": 0.015983595101811392, "mc2": 0.4483407078884665, "mc2_stderr": 0.015114510843263715 }, "harness|winogrande|5": { "acc": 0.7103393843725335, "acc_stderr": 0.012748550807638263 }, "harness|gsm8k|5": { "acc": 0.024260803639120546, "acc_stderr": 0.004238007900001396 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. 
news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
open-llm-leaderboard/details_luffycodes__vicuna-class-shishya-all-hal-7b-ep3
[ "region:us" ]
2023-12-16T14:46:08+00:00
{"pretty_name": "Evaluation run of luffycodes/vicuna-class-shishya-all-hal-7b-ep3", "dataset_summary": "Dataset automatically created during the evaluation run of model [luffycodes/vicuna-class-shishya-all-hal-7b-ep3](https://huggingface.co/luffycodes/vicuna-class-shishya-all-hal-7b-ep3) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_luffycodes__vicuna-class-shishya-all-hal-7b-ep3\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-12-16T14:43:13.038199](https://huggingface.co/datasets/open-llm-leaderboard/details_luffycodes__vicuna-class-shishya-all-hal-7b-ep3/blob/main/results_2023-12-16T14-43-13.038199.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.5101563719470747,\n \"acc_stderr\": 0.034174127741758806,\n \"acc_norm\": 0.5187563327932377,\n \"acc_norm_stderr\": 0.035034297734345236,\n \"mc1\": 0.2962056303549572,\n \"mc1_stderr\": 0.015983595101811392,\n \"mc2\": 0.4483407078884665,\n \"mc2_stderr\": 0.015114510843263715\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.42150170648464164,\n \"acc_stderr\": 0.014430197069326014,\n \"acc_norm\": 0.454778156996587,\n \"acc_norm_stderr\": 0.014551507060836355\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.5836486755626369,\n \"acc_stderr\": 0.004919457850104234,\n \"acc_norm\": 0.7720573590918144,\n \"acc_norm_stderr\": 0.004186480645315569\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.29,\n \"acc_stderr\": 0.04560480215720684,\n \"acc_norm\": 0.29,\n \"acc_norm_stderr\": 0.04560480215720684\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.4666666666666667,\n \"acc_stderr\": 0.043097329010363554,\n \"acc_norm\": 0.4666666666666667,\n \"acc_norm_stderr\": 0.043097329010363554\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.506578947368421,\n \"acc_stderr\": 0.040685900502249704,\n \"acc_norm\": 0.506578947368421,\n \"acc_norm_stderr\": 0.040685900502249704\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.55,\n \"acc_stderr\": 0.05,\n \"acc_norm\": 0.55,\n \"acc_norm_stderr\": 0.05\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.5547169811320755,\n \"acc_stderr\": 0.030588052974270655,\n \"acc_norm\": 0.5547169811320755,\n \"acc_norm_stderr\": 0.030588052974270655\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.5208333333333334,\n \"acc_stderr\": 0.041775789507399935,\n \"acc_norm\": 0.5208333333333334,\n \"acc_norm_stderr\": 0.041775789507399935\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n 
\"acc\": 0.34,\n \"acc_stderr\": 0.04760952285695235,\n \"acc_norm\": 0.34,\n \"acc_norm_stderr\": 0.04760952285695235\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.44,\n \"acc_stderr\": 0.04988876515698589,\n \"acc_norm\": 0.44,\n \"acc_norm_stderr\": 0.04988876515698589\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.37,\n \"acc_stderr\": 0.04852365870939099,\n \"acc_norm\": 0.37,\n \"acc_norm_stderr\": 0.04852365870939099\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.4393063583815029,\n \"acc_stderr\": 0.037842719328874674,\n \"acc_norm\": 0.4393063583815029,\n \"acc_norm_stderr\": 0.037842719328874674\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.20588235294117646,\n \"acc_stderr\": 0.040233822736177476,\n \"acc_norm\": 0.20588235294117646,\n \"acc_norm_stderr\": 0.040233822736177476\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.64,\n \"acc_stderr\": 0.04824181513244218,\n \"acc_norm\": 0.64,\n \"acc_norm_stderr\": 0.04824181513244218\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.451063829787234,\n \"acc_stderr\": 0.032529096196131965,\n \"acc_norm\": 0.451063829787234,\n \"acc_norm_stderr\": 0.032529096196131965\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.3333333333333333,\n \"acc_stderr\": 0.044346007015849245,\n \"acc_norm\": 0.3333333333333333,\n \"acc_norm_stderr\": 0.044346007015849245\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.47586206896551725,\n \"acc_stderr\": 0.041618085035015295,\n \"acc_norm\": 0.47586206896551725,\n \"acc_norm_stderr\": 0.041618085035015295\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.30158730158730157,\n \"acc_stderr\": 0.023636975996101803,\n \"acc_norm\": 0.30158730158730157,\n \"acc_norm_stderr\": 0.023636975996101803\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.3492063492063492,\n \"acc_stderr\": 0.04263906892795133,\n \"acc_norm\": 0.3492063492063492,\n \"acc_norm_stderr\": 0.04263906892795133\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.36,\n \"acc_stderr\": 0.048241815132442176,\n \"acc_norm\": 0.36,\n \"acc_norm_stderr\": 0.048241815132442176\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.5612903225806452,\n \"acc_stderr\": 0.02822949732031722,\n \"acc_norm\": 0.5612903225806452,\n \"acc_norm_stderr\": 0.02822949732031722\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.4039408866995074,\n \"acc_stderr\": 0.034524539038220406,\n \"acc_norm\": 0.4039408866995074,\n \"acc_norm_stderr\": 0.034524539038220406\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.42,\n \"acc_stderr\": 0.049604496374885836,\n \"acc_norm\": 0.42,\n \"acc_norm_stderr\": 0.049604496374885836\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.6303030303030303,\n \"acc_stderr\": 0.03769430314512568,\n \"acc_norm\": 0.6303030303030303,\n \"acc_norm_stderr\": 0.03769430314512568\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.5858585858585859,\n \"acc_stderr\": 0.03509438348879629,\n \"acc_norm\": 0.5858585858585859,\n \"acc_norm_stderr\": 0.03509438348879629\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.7564766839378239,\n \"acc_stderr\": 0.030975436386845436,\n \"acc_norm\": 0.7564766839378239,\n \"acc_norm_stderr\": 0.030975436386845436\n },\n 
\"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.5256410256410257,\n \"acc_stderr\": 0.025317649726448663,\n \"acc_norm\": 0.5256410256410257,\n \"acc_norm_stderr\": 0.025317649726448663\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.2740740740740741,\n \"acc_stderr\": 0.027195934804085626,\n \"acc_norm\": 0.2740740740740741,\n \"acc_norm_stderr\": 0.027195934804085626\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.47478991596638653,\n \"acc_stderr\": 0.0324371805513741,\n \"acc_norm\": 0.47478991596638653,\n \"acc_norm_stderr\": 0.0324371805513741\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.3443708609271523,\n \"acc_stderr\": 0.038796870240733264,\n \"acc_norm\": 0.3443708609271523,\n \"acc_norm_stderr\": 0.038796870240733264\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.7027522935779816,\n \"acc_stderr\": 0.019595707224643523,\n \"acc_norm\": 0.7027522935779816,\n \"acc_norm_stderr\": 0.019595707224643523\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.4027777777777778,\n \"acc_stderr\": 0.03344887382997866,\n \"acc_norm\": 0.4027777777777778,\n \"acc_norm_stderr\": 0.03344887382997866\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.7107843137254902,\n \"acc_stderr\": 0.031822318676475544,\n \"acc_norm\": 0.7107843137254902,\n \"acc_norm_stderr\": 0.031822318676475544\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.7172995780590717,\n \"acc_stderr\": 0.029312814153955934,\n \"acc_norm\": 0.7172995780590717,\n \"acc_norm_stderr\": 0.029312814153955934\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6143497757847534,\n \"acc_stderr\": 0.03266842214289202,\n \"acc_norm\": 0.6143497757847534,\n \"acc_norm_stderr\": 0.03266842214289202\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.648854961832061,\n \"acc_stderr\": 0.04186445163013751,\n \"acc_norm\": 0.648854961832061,\n \"acc_norm_stderr\": 0.04186445163013751\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.5867768595041323,\n \"acc_stderr\": 0.04495087843548408,\n \"acc_norm\": 0.5867768595041323,\n \"acc_norm_stderr\": 0.04495087843548408\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.5277777777777778,\n \"acc_stderr\": 0.048262172941398944,\n \"acc_norm\": 0.5277777777777778,\n \"acc_norm_stderr\": 0.048262172941398944\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.5337423312883436,\n \"acc_stderr\": 0.039194155450484096,\n \"acc_norm\": 0.5337423312883436,\n \"acc_norm_stderr\": 0.039194155450484096\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.41964285714285715,\n \"acc_stderr\": 0.04684099321077106,\n \"acc_norm\": 0.41964285714285715,\n \"acc_norm_stderr\": 0.04684099321077106\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7087378640776699,\n \"acc_stderr\": 0.04498676320572924,\n \"acc_norm\": 0.7087378640776699,\n \"acc_norm_stderr\": 0.04498676320572924\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.7735042735042735,\n \"acc_stderr\": 0.02742100729539292,\n \"acc_norm\": 0.7735042735042735,\n \"acc_norm_stderr\": 0.02742100729539292\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.59,\n \"acc_stderr\": 0.049431107042371025,\n \"acc_norm\": 0.59,\n \"acc_norm_stderr\": 0.049431107042371025\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 
0.6896551724137931,\n \"acc_stderr\": 0.016543785026048315,\n \"acc_norm\": 0.6896551724137931,\n \"acc_norm_stderr\": 0.016543785026048315\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.5606936416184971,\n \"acc_stderr\": 0.026720034380514995,\n \"acc_norm\": 0.5606936416184971,\n \"acc_norm_stderr\": 0.026720034380514995\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.3016759776536313,\n \"acc_stderr\": 0.015350767572220286,\n \"acc_norm\": 0.3016759776536313,\n \"acc_norm_stderr\": 0.015350767572220286\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.5882352941176471,\n \"acc_stderr\": 0.02818059632825929,\n \"acc_norm\": 0.5882352941176471,\n \"acc_norm_stderr\": 0.02818059632825929\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.6109324758842444,\n \"acc_stderr\": 0.027690337536485372,\n \"acc_norm\": 0.6109324758842444,\n \"acc_norm_stderr\": 0.027690337536485372\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.5740740740740741,\n \"acc_stderr\": 0.027513747284379424,\n \"acc_norm\": 0.5740740740740741,\n \"acc_norm_stderr\": 0.027513747284379424\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.36524822695035464,\n \"acc_stderr\": 0.028723863853281285,\n \"acc_norm\": 0.36524822695035464,\n \"acc_norm_stderr\": 0.028723863853281285\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.37353324641460234,\n \"acc_stderr\": 0.012354994823515266,\n \"acc_norm\": 0.37353324641460234,\n \"acc_norm_stderr\": 0.012354994823515266\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.5183823529411765,\n \"acc_stderr\": 0.030352303395351964,\n \"acc_norm\": 0.5183823529411765,\n \"acc_norm_stderr\": 0.030352303395351964\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.49836601307189543,\n \"acc_stderr\": 0.020227726838150117,\n \"acc_norm\": 0.49836601307189543,\n \"acc_norm_stderr\": 0.020227726838150117\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6272727272727273,\n \"acc_stderr\": 0.04631381319425465,\n \"acc_norm\": 0.6272727272727273,\n \"acc_norm_stderr\": 0.04631381319425465\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.636734693877551,\n \"acc_stderr\": 0.03078905113903081,\n \"acc_norm\": 0.636734693877551,\n \"acc_norm_stderr\": 0.03078905113903081\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.681592039800995,\n \"acc_stderr\": 0.03294118479054095,\n \"acc_norm\": 0.681592039800995,\n \"acc_norm_stderr\": 0.03294118479054095\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.71,\n \"acc_stderr\": 0.045604802157206845,\n \"acc_norm\": 0.71,\n \"acc_norm_stderr\": 0.045604802157206845\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.4036144578313253,\n \"acc_stderr\": 0.038194861407583984,\n \"acc_norm\": 0.4036144578313253,\n \"acc_norm_stderr\": 0.038194861407583984\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.7309941520467836,\n \"acc_stderr\": 0.03401052620104089,\n \"acc_norm\": 0.7309941520467836,\n \"acc_norm_stderr\": 0.03401052620104089\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.2962056303549572,\n \"mc1_stderr\": 0.015983595101811392,\n \"mc2\": 0.4483407078884665,\n \"mc2_stderr\": 0.015114510843263715\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7103393843725335,\n \"acc_stderr\": 0.012748550807638263\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.024260803639120546,\n \"acc_stderr\": 
0.004238007900001396\n }\n}\n```", "repo_url": "https://huggingface.co/luffycodes/vicuna-class-shishya-all-hal-7b-ep3", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_12_16T14_43_13.038199", "path": ["**/details_harness|arc:challenge|25_2023-12-16T14-43-13.038199.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-12-16T14-43-13.038199.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_12_16T14_43_13.038199", "path": ["**/details_harness|gsm8k|5_2023-12-16T14-43-13.038199.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-12-16T14-43-13.038199.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_12_16T14_43_13.038199", "path": ["**/details_harness|hellaswag|10_2023-12-16T14-43-13.038199.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-12-16T14-43-13.038199.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_12_16T14_43_13.038199", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T14-43-13.038199.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-12-16T14-43-13.038199.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-12-16T14-43-13.038199.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T14-43-13.038199.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T14-43-13.038199.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-12-16T14-43-13.038199.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T14-43-13.038199.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T14-43-13.038199.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T14-43-13.038199.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T14-43-13.038199.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-12-16T14-43-13.038199.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-12-16T14-43-13.038199.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T14-43-13.038199.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-12-16T14-43-13.038199.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T14-43-13.038199.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T14-43-13.038199.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T14-43-13.038199.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-12-16T14-43-13.038199.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T14-43-13.038199.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T14-43-13.038199.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T14-43-13.038199.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T14-43-13.038199.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T14-43-13.038199.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T14-43-13.038199.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T14-43-13.038199.parquet", 
"**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T14-43-13.038199.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T14-43-13.038199.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T14-43-13.038199.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T14-43-13.038199.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T14-43-13.038199.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T14-43-13.038199.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T14-43-13.038199.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-12-16T14-43-13.038199.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T14-43-13.038199.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-12-16T14-43-13.038199.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T14-43-13.038199.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T14-43-13.038199.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T14-43-13.038199.parquet", "**/details_harness|hendrycksTest-management|5_2023-12-16T14-43-13.038199.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-12-16T14-43-13.038199.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T14-43-13.038199.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T14-43-13.038199.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T14-43-13.038199.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T14-43-13.038199.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-12-16T14-43-13.038199.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-12-16T14-43-13.038199.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-12-16T14-43-13.038199.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T14-43-13.038199.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-12-16T14-43-13.038199.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T14-43-13.038199.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T14-43-13.038199.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-12-16T14-43-13.038199.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-12-16T14-43-13.038199.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-12-16T14-43-13.038199.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T14-43-13.038199.parquet", "**/details_harness|hendrycksTest-virology|5_2023-12-16T14-43-13.038199.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-12-16T14-43-13.038199.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T14-43-13.038199.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-12-16T14-43-13.038199.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-12-16T14-43-13.038199.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T14-43-13.038199.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T14-43-13.038199.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-12-16T14-43-13.038199.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T14-43-13.038199.parquet", 
"**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T14-43-13.038199.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T14-43-13.038199.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T14-43-13.038199.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-12-16T14-43-13.038199.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-12-16T14-43-13.038199.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T14-43-13.038199.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-12-16T14-43-13.038199.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T14-43-13.038199.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T14-43-13.038199.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T14-43-13.038199.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-12-16T14-43-13.038199.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T14-43-13.038199.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T14-43-13.038199.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T14-43-13.038199.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T14-43-13.038199.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T14-43-13.038199.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T14-43-13.038199.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T14-43-13.038199.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T14-43-13.038199.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T14-43-13.038199.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T14-43-13.038199.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T14-43-13.038199.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T14-43-13.038199.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T14-43-13.038199.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T14-43-13.038199.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-12-16T14-43-13.038199.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T14-43-13.038199.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-12-16T14-43-13.038199.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T14-43-13.038199.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T14-43-13.038199.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T14-43-13.038199.parquet", "**/details_harness|hendrycksTest-management|5_2023-12-16T14-43-13.038199.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-12-16T14-43-13.038199.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T14-43-13.038199.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T14-43-13.038199.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T14-43-13.038199.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T14-43-13.038199.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-12-16T14-43-13.038199.parquet", 
"**/details_harness|hendrycksTest-philosophy|5_2023-12-16T14-43-13.038199.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-12-16T14-43-13.038199.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T14-43-13.038199.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-12-16T14-43-13.038199.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T14-43-13.038199.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T14-43-13.038199.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-12-16T14-43-13.038199.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-12-16T14-43-13.038199.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-12-16T14-43-13.038199.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T14-43-13.038199.parquet", "**/details_harness|hendrycksTest-virology|5_2023-12-16T14-43-13.038199.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-12-16T14-43-13.038199.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_12_16T14_43_13.038199", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T14-43-13.038199.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T14-43-13.038199.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_12_16T14_43_13.038199", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-12-16T14-43-13.038199.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-12-16T14-43-13.038199.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_12_16T14_43_13.038199", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-12-16T14-43-13.038199.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-12-16T14-43-13.038199.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_12_16T14_43_13.038199", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T14-43-13.038199.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T14-43-13.038199.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_12_16T14_43_13.038199", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T14-43-13.038199.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T14-43-13.038199.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_12_16T14_43_13.038199", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-12-16T14-43-13.038199.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-12-16T14-43-13.038199.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_12_16T14_43_13.038199", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T14-43-13.038199.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T14-43-13.038199.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_12_16T14_43_13.038199", "path": 
["**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T14-43-13.038199.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T14-43-13.038199.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_12_16T14_43_13.038199", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T14-43-13.038199.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T14-43-13.038199.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_12_16T14_43_13.038199", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T14-43-13.038199.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T14-43-13.038199.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_12_16T14_43_13.038199", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-12-16T14-43-13.038199.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-12-16T14-43-13.038199.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_12_16T14_43_13.038199", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-12-16T14-43-13.038199.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-12-16T14-43-13.038199.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_12_16T14_43_13.038199", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T14-43-13.038199.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T14-43-13.038199.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_12_16T14_43_13.038199", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-12-16T14-43-13.038199.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-12-16T14-43-13.038199.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_12_16T14_43_13.038199", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T14-43-13.038199.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T14-43-13.038199.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_12_16T14_43_13.038199", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T14-43-13.038199.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T14-43-13.038199.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_12_16T14_43_13.038199", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T14-43-13.038199.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T14-43-13.038199.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_12_16T14_43_13.038199", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-12-16T14-43-13.038199.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-global_facts|5_2023-12-16T14-43-13.038199.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_12_16T14_43_13.038199", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T14-43-13.038199.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T14-43-13.038199.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_12_16T14_43_13.038199", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T14-43-13.038199.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T14-43-13.038199.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_12_16T14_43_13.038199", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T14-43-13.038199.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T14-43-13.038199.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_12_16T14_43_13.038199", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T14-43-13.038199.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T14-43-13.038199.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_12_16T14_43_13.038199", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T14-43-13.038199.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T14-43-13.038199.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_12_16T14_43_13.038199", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T14-43-13.038199.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T14-43-13.038199.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_12_16T14_43_13.038199", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T14-43-13.038199.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T14-43-13.038199.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_12_16T14_43_13.038199", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T14-43-13.038199.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T14-43-13.038199.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_12_16T14_43_13.038199", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T14-43-13.038199.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T14-43-13.038199.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_12_16T14_43_13.038199", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T14-43-13.038199.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T14-43-13.038199.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_12_16T14_43_13.038199", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T14-43-13.038199.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T14-43-13.038199.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_12_16T14_43_13.038199", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T14-43-13.038199.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T14-43-13.038199.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_12_16T14_43_13.038199", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T14-43-13.038199.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T14-43-13.038199.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_12_16T14_43_13.038199", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T14-43-13.038199.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T14-43-13.038199.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_12_16T14_43_13.038199", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-12-16T14-43-13.038199.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-12-16T14-43-13.038199.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_12_16T14_43_13.038199", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T14-43-13.038199.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T14-43-13.038199.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_12_16T14_43_13.038199", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-12-16T14-43-13.038199.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-12-16T14-43-13.038199.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_12_16T14_43_13.038199", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T14-43-13.038199.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T14-43-13.038199.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_12_16T14_43_13.038199", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T14-43-13.038199.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T14-43-13.038199.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_12_16T14_43_13.038199", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T14-43-13.038199.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T14-43-13.038199.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_12_16T14_43_13.038199", "path": ["**/details_harness|hendrycksTest-management|5_2023-12-16T14-43-13.038199.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-12-16T14-43-13.038199.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_12_16T14_43_13.038199", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-12-16T14-43-13.038199.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-12-16T14-43-13.038199.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_12_16T14_43_13.038199", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T14-43-13.038199.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T14-43-13.038199.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_12_16T14_43_13.038199", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T14-43-13.038199.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T14-43-13.038199.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_12_16T14_43_13.038199", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T14-43-13.038199.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T14-43-13.038199.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_12_16T14_43_13.038199", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T14-43-13.038199.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T14-43-13.038199.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_12_16T14_43_13.038199", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-12-16T14-43-13.038199.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-12-16T14-43-13.038199.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_12_16T14_43_13.038199", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-12-16T14-43-13.038199.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-12-16T14-43-13.038199.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_12_16T14_43_13.038199", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-12-16T14-43-13.038199.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-12-16T14-43-13.038199.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_12_16T14_43_13.038199", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T14-43-13.038199.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T14-43-13.038199.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_12_16T14_43_13.038199", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2023-12-16T14-43-13.038199.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-12-16T14-43-13.038199.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_12_16T14_43_13.038199", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T14-43-13.038199.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T14-43-13.038199.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_12_16T14_43_13.038199", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T14-43-13.038199.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T14-43-13.038199.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_12_16T14_43_13.038199", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-12-16T14-43-13.038199.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-12-16T14-43-13.038199.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_12_16T14_43_13.038199", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-12-16T14-43-13.038199.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-12-16T14-43-13.038199.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_12_16T14_43_13.038199", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-12-16T14-43-13.038199.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-12-16T14-43-13.038199.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_12_16T14_43_13.038199", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T14-43-13.038199.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T14-43-13.038199.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_12_16T14_43_13.038199", "path": ["**/details_harness|hendrycksTest-virology|5_2023-12-16T14-43-13.038199.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-12-16T14-43-13.038199.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_12_16T14_43_13.038199", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-12-16T14-43-13.038199.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-12-16T14-43-13.038199.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_12_16T14_43_13.038199", "path": ["**/details_harness|truthfulqa:mc|0_2023-12-16T14-43-13.038199.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-12-16T14-43-13.038199.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_12_16T14_43_13.038199", "path": ["**/details_harness|winogrande|5_2023-12-16T14-43-13.038199.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-12-16T14-43-13.038199.parquet"]}]}, {"config_name": "results", "data_files": [{"split": 
"2023_12_16T14_43_13.038199", "path": ["results_2023-12-16T14-43-13.038199.parquet"]}, {"split": "latest", "path": ["results_2023-12-16T14-43-13.038199.parquet"]}]}]}
2023-12-16T14:46:49+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of luffycodes/vicuna-class-shishya-all-hal-7b-ep3 Dataset automatically created during the evaluation run of model luffycodes/vicuna-class-shishya-all-hal-7b-ep3 on the Open LLM Leaderboard. The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-12-16T14:43:13.038199(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
[ "# Dataset Card for Evaluation run of luffycodes/vicuna-class-shishya-all-hal-7b-ep3\n\n\n\nDataset automatically created during the evaluation run of model luffycodes/vicuna-class-shishya-all-hal-7b-ep3 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-12-16T14:43:13.038199(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of luffycodes/vicuna-class-shishya-all-hal-7b-ep3\n\n\n\nDataset automatically created during the evaluation run of model luffycodes/vicuna-class-shishya-all-hal-7b-ep3 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-12-16T14:43:13.038199(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 203, 67, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of luffycodes/vicuna-class-shishya-all-hal-7b-ep3\n\n\n\nDataset automatically created during the evaluation run of model luffycodes/vicuna-class-shishya-all-hal-7b-ep3 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-12-16T14:43:13.038199(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]" ]
c210ac0bd9f89ce1d286cc599e4d92093f429450
# Dataset Card for Evaluation run of rwitz2/go-bruins-v2.1.1 <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [rwitz2/go-bruins-v2.1.1](https://huggingface.co/rwitz2/go-bruins-v2.1.1) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_rwitz2__go-bruins-v2.1.1", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-12-16T14:47:35.871677](https://huggingface.co/datasets/open-llm-leaderboard/details_rwitz2__go-bruins-v2.1.1/blob/main/results_2023-12-16T14-47-35.871677.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.6573437520687897, "acc_stderr": 0.03198449258012776, "acc_norm": 0.6569821271376254, "acc_norm_stderr": 0.03264628402677364, "mc1": 0.5593635250917993, "mc1_stderr": 0.017379697555437446, "mc2": 0.6980242594144186, "mc2_stderr": 0.015016792773490651 }, "harness|arc:challenge|25": { "acc": 0.6988054607508533, "acc_stderr": 0.013406741767847629, "acc_norm": 0.7286689419795221, "acc_norm_stderr": 0.012993807727545797 }, "harness|hellaswag|10": { "acc": 0.7141007767377017, "acc_stderr": 0.004509181919322842, "acc_norm": 0.8832901812387971, "acc_norm_stderr": 0.0032041800729423787 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.34, "acc_stderr": 0.04760952285695235, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695235 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.6444444444444445, "acc_stderr": 0.04135176749720386, "acc_norm": 0.6444444444444445, "acc_norm_stderr": 0.04135176749720386 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.7039473684210527, "acc_stderr": 0.03715062154998904, "acc_norm": 0.7039473684210527, "acc_norm_stderr": 0.03715062154998904 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.63, "acc_stderr": 0.04852365870939099, "acc_norm": 0.63, "acc_norm_stderr": 0.04852365870939099 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.7245283018867924, "acc_stderr": 0.027495663683724057, "acc_norm": 0.7245283018867924, "acc_norm_stderr": 0.027495663683724057 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.7708333333333334, "acc_stderr": 0.03514697467862388, "acc_norm": 0.7708333333333334, "acc_norm_stderr": 0.03514697467862388 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.46, "acc_stderr": 0.05009082659620333, "acc_norm": 0.46, "acc_norm_stderr": 0.05009082659620333 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.56, "acc_stderr": 0.049888765156985884, "acc_norm": 0.56, "acc_norm_stderr": 0.049888765156985884 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 
0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.6936416184971098, "acc_stderr": 0.03514942551267439, "acc_norm": 0.6936416184971098, "acc_norm_stderr": 0.03514942551267439 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.46078431372549017, "acc_stderr": 0.04959859966384181, "acc_norm": 0.46078431372549017, "acc_norm_stderr": 0.04959859966384181 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.77, "acc_stderr": 0.042295258468165065, "acc_norm": 0.77, "acc_norm_stderr": 0.042295258468165065 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.5787234042553191, "acc_stderr": 0.03227834510146268, "acc_norm": 0.5787234042553191, "acc_norm_stderr": 0.03227834510146268 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.5087719298245614, "acc_stderr": 0.04702880432049615, "acc_norm": 0.5087719298245614, "acc_norm_stderr": 0.04702880432049615 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5724137931034483, "acc_stderr": 0.04122737111370333, "acc_norm": 0.5724137931034483, "acc_norm_stderr": 0.04122737111370333 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.42592592592592593, "acc_stderr": 0.02546714904546955, "acc_norm": 0.42592592592592593, "acc_norm_stderr": 0.02546714904546955 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.46825396825396826, "acc_stderr": 0.04463112720677172, "acc_norm": 0.46825396825396826, "acc_norm_stderr": 0.04463112720677172 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.34, "acc_stderr": 0.04760952285695235, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695235 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.7903225806451613, "acc_stderr": 0.023157879349083525, "acc_norm": 0.7903225806451613, "acc_norm_stderr": 0.023157879349083525 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.5270935960591133, "acc_stderr": 0.03512819077876106, "acc_norm": 0.5270935960591133, "acc_norm_stderr": 0.03512819077876106 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.69, "acc_stderr": 0.04648231987117316, "acc_norm": 0.69, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.7818181818181819, "acc_stderr": 0.03225078108306289, "acc_norm": 0.7818181818181819, "acc_norm_stderr": 0.03225078108306289 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.7878787878787878, "acc_stderr": 0.029126522834586815, "acc_norm": 0.7878787878787878, "acc_norm_stderr": 0.029126522834586815 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.9067357512953368, "acc_stderr": 0.02098685459328974, "acc_norm": 0.9067357512953368, "acc_norm_stderr": 0.02098685459328974 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.6641025641025641, "acc_stderr": 0.023946724741563973, "acc_norm": 0.6641025641025641, "acc_norm_stderr": 0.023946724741563973 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.32592592592592595, "acc_stderr": 0.02857834836547308, "acc_norm": 0.32592592592592595, "acc_norm_stderr": 0.02857834836547308 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.6680672268907563, "acc_stderr": 0.03058869701378364, "acc_norm": 0.6680672268907563, "acc_norm_stderr": 0.03058869701378364 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.3509933774834437, "acc_stderr": 0.03896981964257375, "acc_norm": 0.3509933774834437, 
"acc_norm_stderr": 0.03896981964257375 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8532110091743119, "acc_stderr": 0.01517314184512625, "acc_norm": 0.8532110091743119, "acc_norm_stderr": 0.01517314184512625 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.5416666666666666, "acc_stderr": 0.03398110890294636, "acc_norm": 0.5416666666666666, "acc_norm_stderr": 0.03398110890294636 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.8235294117647058, "acc_stderr": 0.026756401538078962, "acc_norm": 0.8235294117647058, "acc_norm_stderr": 0.026756401538078962 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.8059071729957806, "acc_stderr": 0.025744902532290916, "acc_norm": 0.8059071729957806, "acc_norm_stderr": 0.025744902532290916 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.6905829596412556, "acc_stderr": 0.031024411740572213, "acc_norm": 0.6905829596412556, "acc_norm_stderr": 0.031024411740572213 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.8244274809160306, "acc_stderr": 0.03336820338476074, "acc_norm": 0.8244274809160306, "acc_norm_stderr": 0.03336820338476074 }, "harness|hendrycksTest-international_law|5": { "acc": 0.8016528925619835, "acc_stderr": 0.03640118271990947, "acc_norm": 0.8016528925619835, "acc_norm_stderr": 0.03640118271990947 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.7685185185185185, "acc_stderr": 0.04077494709252627, "acc_norm": 0.7685185185185185, "acc_norm_stderr": 0.04077494709252627 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.7852760736196319, "acc_stderr": 0.03226219377286775, "acc_norm": 0.7852760736196319, "acc_norm_stderr": 0.03226219377286775 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.4107142857142857, "acc_stderr": 0.04669510663875191, "acc_norm": 0.4107142857142857, "acc_norm_stderr": 0.04669510663875191 }, "harness|hendrycksTest-management|5": { "acc": 0.7572815533980582, "acc_stderr": 0.04245022486384495, "acc_norm": 0.7572815533980582, "acc_norm_stderr": 0.04245022486384495 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8803418803418803, "acc_stderr": 0.021262719400406957, "acc_norm": 0.8803418803418803, "acc_norm_stderr": 0.021262719400406957 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.7, "acc_stderr": 0.046056618647183814, "acc_norm": 0.7, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.8339719029374202, "acc_stderr": 0.013306478243066302, "acc_norm": 0.8339719029374202, "acc_norm_stderr": 0.013306478243066302 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.7369942196531792, "acc_stderr": 0.02370309952525818, "acc_norm": 0.7369942196531792, "acc_norm_stderr": 0.02370309952525818 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.4793296089385475, "acc_stderr": 0.016708205559996137, "acc_norm": 0.4793296089385475, "acc_norm_stderr": 0.016708205559996137 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.7254901960784313, "acc_stderr": 0.025553169991826528, "acc_norm": 0.7254901960784313, "acc_norm_stderr": 0.025553169991826528 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.7170418006430869, "acc_stderr": 0.02558306248998481, "acc_norm": 0.7170418006430869, "acc_norm_stderr": 0.02558306248998481 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.7469135802469136, "acc_stderr": 0.024191808600712995, "acc_norm": 0.7469135802469136, "acc_norm_stderr": 0.024191808600712995 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.475177304964539, 
"acc_stderr": 0.029790719243829727, "acc_norm": 0.475177304964539, "acc_norm_stderr": 0.029790719243829727 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.47196870925684486, "acc_stderr": 0.012750151802922438, "acc_norm": 0.47196870925684486, "acc_norm_stderr": 0.012750151802922438 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.6875, "acc_stderr": 0.02815637344037142, "acc_norm": 0.6875, "acc_norm_stderr": 0.02815637344037142 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.6813725490196079, "acc_stderr": 0.01885008469646872, "acc_norm": 0.6813725490196079, "acc_norm_stderr": 0.01885008469646872 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6818181818181818, "acc_stderr": 0.044612721759105085, "acc_norm": 0.6818181818181818, "acc_norm_stderr": 0.044612721759105085 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.7346938775510204, "acc_stderr": 0.028263889943784593, "acc_norm": 0.7346938775510204, "acc_norm_stderr": 0.028263889943784593 }, "harness|hendrycksTest-sociology|5": { "acc": 0.8407960199004975, "acc_stderr": 0.025870646766169136, "acc_norm": 0.8407960199004975, "acc_norm_stderr": 0.025870646766169136 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.86, "acc_stderr": 0.0348735088019777, "acc_norm": 0.86, "acc_norm_stderr": 0.0348735088019777 }, "harness|hendrycksTest-virology|5": { "acc": 0.5481927710843374, "acc_stderr": 0.03874371556587953, "acc_norm": 0.5481927710843374, "acc_norm_stderr": 0.03874371556587953 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8304093567251462, "acc_stderr": 0.02878210810540171, "acc_norm": 0.8304093567251462, "acc_norm_stderr": 0.02878210810540171 }, "harness|truthfulqa:mc|0": { "mc1": 0.5593635250917993, "mc1_stderr": 0.017379697555437446, "mc2": 0.6980242594144186, "mc2_stderr": 0.015016792773490651 }, "harness|winogrande|5": { "acc": 0.8224151539068666, "acc_stderr": 0.010740676861359238 }, "harness|gsm8k|5": { "acc": 0.7126611068991661, "acc_stderr": 0.012464677060107086 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). 
--> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
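The card above shows how to load a single task configuration; the aggregated metrics live in the separate "results" configuration mentioned in the summary. As a minimal sketch (assuming the "results" configuration exposes the same "latest" split as the per-task configurations, which is the pattern used by these evaluation datasets), it could be read like this:

```python
from datasets import load_dataset

# Sketch: read the aggregated metrics stored in the "results" configuration.
# The "latest" split is assumed to point at the most recent run, mirroring
# the per-task configurations; the timestamped split name would work as well.
results = load_dataset(
    "open-llm-leaderboard/details_rwitz2__go-bruins-v2.1.1",
    "results",
    split="latest",
)

# Each row holds the aggregated accuracy fields for one run.
print(results[0])
```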
open-llm-leaderboard/details_rwitz2__go-bruins-v2.1.1
[ "region:us" ]
2023-12-16T14:50:27+00:00
{"pretty_name": "Evaluation run of rwitz2/go-bruins-v2.1.1", "dataset_summary": "Dataset automatically created during the evaluation run of model [rwitz2/go-bruins-v2.1.1](https://huggingface.co/rwitz2/go-bruins-v2.1.1) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_rwitz2__go-bruins-v2.1.1\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-12-16T14:47:35.871677](https://huggingface.co/datasets/open-llm-leaderboard/details_rwitz2__go-bruins-v2.1.1/blob/main/results_2023-12-16T14-47-35.871677.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6573437520687897,\n \"acc_stderr\": 0.03198449258012776,\n \"acc_norm\": 0.6569821271376254,\n \"acc_norm_stderr\": 0.03264628402677364,\n \"mc1\": 0.5593635250917993,\n \"mc1_stderr\": 0.017379697555437446,\n \"mc2\": 0.6980242594144186,\n \"mc2_stderr\": 0.015016792773490651\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.6988054607508533,\n \"acc_stderr\": 0.013406741767847629,\n \"acc_norm\": 0.7286689419795221,\n \"acc_norm_stderr\": 0.012993807727545797\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.7141007767377017,\n \"acc_stderr\": 0.004509181919322842,\n \"acc_norm\": 0.8832901812387971,\n \"acc_norm_stderr\": 0.0032041800729423787\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.34,\n \"acc_stderr\": 0.04760952285695235,\n \"acc_norm\": 0.34,\n \"acc_norm_stderr\": 0.04760952285695235\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6444444444444445,\n \"acc_stderr\": 0.04135176749720386,\n \"acc_norm\": 0.6444444444444445,\n \"acc_norm_stderr\": 0.04135176749720386\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.7039473684210527,\n \"acc_stderr\": 0.03715062154998904,\n \"acc_norm\": 0.7039473684210527,\n \"acc_norm_stderr\": 0.03715062154998904\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.63,\n \"acc_stderr\": 0.04852365870939099,\n \"acc_norm\": 0.63,\n \"acc_norm_stderr\": 0.04852365870939099\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.7245283018867924,\n \"acc_stderr\": 0.027495663683724057,\n \"acc_norm\": 0.7245283018867924,\n \"acc_norm_stderr\": 0.027495663683724057\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7708333333333334,\n \"acc_stderr\": 0.03514697467862388,\n \"acc_norm\": 0.7708333333333334,\n \"acc_norm_stderr\": 0.03514697467862388\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.46,\n \"acc_stderr\": 0.05009082659620333,\n \"acc_norm\": 0.46,\n 
\"acc_norm_stderr\": 0.05009082659620333\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.56,\n \"acc_stderr\": 0.049888765156985884,\n \"acc_norm\": 0.56,\n \"acc_norm_stderr\": 0.049888765156985884\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.31,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.31,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6936416184971098,\n \"acc_stderr\": 0.03514942551267439,\n \"acc_norm\": 0.6936416184971098,\n \"acc_norm_stderr\": 0.03514942551267439\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.46078431372549017,\n \"acc_stderr\": 0.04959859966384181,\n \"acc_norm\": 0.46078431372549017,\n \"acc_norm_stderr\": 0.04959859966384181\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.77,\n \"acc_stderr\": 0.042295258468165065,\n \"acc_norm\": 0.77,\n \"acc_norm_stderr\": 0.042295258468165065\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.5787234042553191,\n \"acc_stderr\": 0.03227834510146268,\n \"acc_norm\": 0.5787234042553191,\n \"acc_norm_stderr\": 0.03227834510146268\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.5087719298245614,\n \"acc_stderr\": 0.04702880432049615,\n \"acc_norm\": 0.5087719298245614,\n \"acc_norm_stderr\": 0.04702880432049615\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5724137931034483,\n \"acc_stderr\": 0.04122737111370333,\n \"acc_norm\": 0.5724137931034483,\n \"acc_norm_stderr\": 0.04122737111370333\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.42592592592592593,\n \"acc_stderr\": 0.02546714904546955,\n \"acc_norm\": 0.42592592592592593,\n \"acc_norm_stderr\": 0.02546714904546955\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.46825396825396826,\n \"acc_stderr\": 0.04463112720677172,\n \"acc_norm\": 0.46825396825396826,\n \"acc_norm_stderr\": 0.04463112720677172\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.34,\n \"acc_stderr\": 0.04760952285695235,\n \"acc_norm\": 0.34,\n \"acc_norm_stderr\": 0.04760952285695235\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7903225806451613,\n \"acc_stderr\": 0.023157879349083525,\n \"acc_norm\": 0.7903225806451613,\n \"acc_norm_stderr\": 0.023157879349083525\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.5270935960591133,\n \"acc_stderr\": 0.03512819077876106,\n \"acc_norm\": 0.5270935960591133,\n \"acc_norm_stderr\": 0.03512819077876106\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.69,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.69,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7818181818181819,\n \"acc_stderr\": 0.03225078108306289,\n \"acc_norm\": 0.7818181818181819,\n \"acc_norm_stderr\": 0.03225078108306289\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.7878787878787878,\n \"acc_stderr\": 0.029126522834586815,\n \"acc_norm\": 0.7878787878787878,\n \"acc_norm_stderr\": 0.029126522834586815\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.9067357512953368,\n \"acc_stderr\": 0.02098685459328974,\n \"acc_norm\": 0.9067357512953368,\n \"acc_norm_stderr\": 0.02098685459328974\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.6641025641025641,\n 
\"acc_stderr\": 0.023946724741563973,\n \"acc_norm\": 0.6641025641025641,\n \"acc_norm_stderr\": 0.023946724741563973\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.32592592592592595,\n \"acc_stderr\": 0.02857834836547308,\n \"acc_norm\": 0.32592592592592595,\n \"acc_norm_stderr\": 0.02857834836547308\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.6680672268907563,\n \"acc_stderr\": 0.03058869701378364,\n \"acc_norm\": 0.6680672268907563,\n \"acc_norm_stderr\": 0.03058869701378364\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.3509933774834437,\n \"acc_stderr\": 0.03896981964257375,\n \"acc_norm\": 0.3509933774834437,\n \"acc_norm_stderr\": 0.03896981964257375\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8532110091743119,\n \"acc_stderr\": 0.01517314184512625,\n \"acc_norm\": 0.8532110091743119,\n \"acc_norm_stderr\": 0.01517314184512625\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.5416666666666666,\n \"acc_stderr\": 0.03398110890294636,\n \"acc_norm\": 0.5416666666666666,\n \"acc_norm_stderr\": 0.03398110890294636\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.8235294117647058,\n \"acc_stderr\": 0.026756401538078962,\n \"acc_norm\": 0.8235294117647058,\n \"acc_norm_stderr\": 0.026756401538078962\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.8059071729957806,\n \"acc_stderr\": 0.025744902532290916,\n \"acc_norm\": 0.8059071729957806,\n \"acc_norm_stderr\": 0.025744902532290916\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6905829596412556,\n \"acc_stderr\": 0.031024411740572213,\n \"acc_norm\": 0.6905829596412556,\n \"acc_norm_stderr\": 0.031024411740572213\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.8244274809160306,\n \"acc_stderr\": 0.03336820338476074,\n \"acc_norm\": 0.8244274809160306,\n \"acc_norm_stderr\": 0.03336820338476074\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.8016528925619835,\n \"acc_stderr\": 0.03640118271990947,\n \"acc_norm\": 0.8016528925619835,\n \"acc_norm_stderr\": 0.03640118271990947\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7685185185185185,\n \"acc_stderr\": 0.04077494709252627,\n \"acc_norm\": 0.7685185185185185,\n \"acc_norm_stderr\": 0.04077494709252627\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7852760736196319,\n \"acc_stderr\": 0.03226219377286775,\n \"acc_norm\": 0.7852760736196319,\n \"acc_norm_stderr\": 0.03226219377286775\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.4107142857142857,\n \"acc_stderr\": 0.04669510663875191,\n \"acc_norm\": 0.4107142857142857,\n \"acc_norm_stderr\": 0.04669510663875191\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7572815533980582,\n \"acc_stderr\": 0.04245022486384495,\n \"acc_norm\": 0.7572815533980582,\n \"acc_norm_stderr\": 0.04245022486384495\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8803418803418803,\n \"acc_stderr\": 0.021262719400406957,\n \"acc_norm\": 0.8803418803418803,\n \"acc_norm_stderr\": 0.021262719400406957\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.7,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.7,\n \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.8339719029374202,\n \"acc_stderr\": 0.013306478243066302,\n \"acc_norm\": 0.8339719029374202,\n 
\"acc_norm_stderr\": 0.013306478243066302\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.7369942196531792,\n \"acc_stderr\": 0.02370309952525818,\n \"acc_norm\": 0.7369942196531792,\n \"acc_norm_stderr\": 0.02370309952525818\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.4793296089385475,\n \"acc_stderr\": 0.016708205559996137,\n \"acc_norm\": 0.4793296089385475,\n \"acc_norm_stderr\": 0.016708205559996137\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.7254901960784313,\n \"acc_stderr\": 0.025553169991826528,\n \"acc_norm\": 0.7254901960784313,\n \"acc_norm_stderr\": 0.025553169991826528\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.7170418006430869,\n \"acc_stderr\": 0.02558306248998481,\n \"acc_norm\": 0.7170418006430869,\n \"acc_norm_stderr\": 0.02558306248998481\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.7469135802469136,\n \"acc_stderr\": 0.024191808600712995,\n \"acc_norm\": 0.7469135802469136,\n \"acc_norm_stderr\": 0.024191808600712995\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.475177304964539,\n \"acc_stderr\": 0.029790719243829727,\n \"acc_norm\": 0.475177304964539,\n \"acc_norm_stderr\": 0.029790719243829727\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.47196870925684486,\n \"acc_stderr\": 0.012750151802922438,\n \"acc_norm\": 0.47196870925684486,\n \"acc_norm_stderr\": 0.012750151802922438\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.6875,\n \"acc_stderr\": 0.02815637344037142,\n \"acc_norm\": 0.6875,\n \"acc_norm_stderr\": 0.02815637344037142\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6813725490196079,\n \"acc_stderr\": 0.01885008469646872,\n \"acc_norm\": 0.6813725490196079,\n \"acc_norm_stderr\": 0.01885008469646872\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6818181818181818,\n \"acc_stderr\": 0.044612721759105085,\n \"acc_norm\": 0.6818181818181818,\n \"acc_norm_stderr\": 0.044612721759105085\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.7346938775510204,\n \"acc_stderr\": 0.028263889943784593,\n \"acc_norm\": 0.7346938775510204,\n \"acc_norm_stderr\": 0.028263889943784593\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8407960199004975,\n \"acc_stderr\": 0.025870646766169136,\n \"acc_norm\": 0.8407960199004975,\n \"acc_norm_stderr\": 0.025870646766169136\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.86,\n \"acc_stderr\": 0.0348735088019777,\n \"acc_norm\": 0.86,\n \"acc_norm_stderr\": 0.0348735088019777\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5481927710843374,\n \"acc_stderr\": 0.03874371556587953,\n \"acc_norm\": 0.5481927710843374,\n \"acc_norm_stderr\": 0.03874371556587953\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8304093567251462,\n \"acc_stderr\": 0.02878210810540171,\n \"acc_norm\": 0.8304093567251462,\n \"acc_norm_stderr\": 0.02878210810540171\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.5593635250917993,\n \"mc1_stderr\": 0.017379697555437446,\n \"mc2\": 0.6980242594144186,\n \"mc2_stderr\": 0.015016792773490651\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.8224151539068666,\n \"acc_stderr\": 0.010740676861359238\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.7126611068991661,\n \"acc_stderr\": 0.012464677060107086\n }\n}\n```", "repo_url": "https://huggingface.co/rwitz2/go-bruins-v2.1.1", "leaderboard_url": 
"https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_12_16T14_47_35.871677", "path": ["**/details_harness|arc:challenge|25_2023-12-16T14-47-35.871677.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-12-16T14-47-35.871677.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_12_16T14_47_35.871677", "path": ["**/details_harness|gsm8k|5_2023-12-16T14-47-35.871677.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-12-16T14-47-35.871677.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_12_16T14_47_35.871677", "path": ["**/details_harness|hellaswag|10_2023-12-16T14-47-35.871677.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-12-16T14-47-35.871677.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_12_16T14_47_35.871677", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T14-47-35.871677.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-12-16T14-47-35.871677.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-12-16T14-47-35.871677.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T14-47-35.871677.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T14-47-35.871677.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-12-16T14-47-35.871677.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T14-47-35.871677.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T14-47-35.871677.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T14-47-35.871677.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T14-47-35.871677.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-12-16T14-47-35.871677.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-12-16T14-47-35.871677.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T14-47-35.871677.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-12-16T14-47-35.871677.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T14-47-35.871677.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T14-47-35.871677.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T14-47-35.871677.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-12-16T14-47-35.871677.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T14-47-35.871677.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T14-47-35.871677.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T14-47-35.871677.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T14-47-35.871677.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T14-47-35.871677.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T14-47-35.871677.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T14-47-35.871677.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T14-47-35.871677.parquet", 
"**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T14-47-35.871677.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T14-47-35.871677.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T14-47-35.871677.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T14-47-35.871677.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T14-47-35.871677.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T14-47-35.871677.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-12-16T14-47-35.871677.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T14-47-35.871677.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-12-16T14-47-35.871677.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T14-47-35.871677.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T14-47-35.871677.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T14-47-35.871677.parquet", "**/details_harness|hendrycksTest-management|5_2023-12-16T14-47-35.871677.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-12-16T14-47-35.871677.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T14-47-35.871677.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T14-47-35.871677.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T14-47-35.871677.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T14-47-35.871677.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-12-16T14-47-35.871677.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-12-16T14-47-35.871677.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-12-16T14-47-35.871677.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T14-47-35.871677.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-12-16T14-47-35.871677.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T14-47-35.871677.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T14-47-35.871677.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-12-16T14-47-35.871677.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-12-16T14-47-35.871677.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-12-16T14-47-35.871677.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T14-47-35.871677.parquet", "**/details_harness|hendrycksTest-virology|5_2023-12-16T14-47-35.871677.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-12-16T14-47-35.871677.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T14-47-35.871677.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-12-16T14-47-35.871677.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-12-16T14-47-35.871677.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T14-47-35.871677.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T14-47-35.871677.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-12-16T14-47-35.871677.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T14-47-35.871677.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T14-47-35.871677.parquet", 
"**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T14-47-35.871677.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T14-47-35.871677.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-12-16T14-47-35.871677.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-12-16T14-47-35.871677.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T14-47-35.871677.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-12-16T14-47-35.871677.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T14-47-35.871677.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T14-47-35.871677.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T14-47-35.871677.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-12-16T14-47-35.871677.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T14-47-35.871677.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T14-47-35.871677.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T14-47-35.871677.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T14-47-35.871677.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T14-47-35.871677.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T14-47-35.871677.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T14-47-35.871677.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T14-47-35.871677.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T14-47-35.871677.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T14-47-35.871677.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T14-47-35.871677.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T14-47-35.871677.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T14-47-35.871677.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T14-47-35.871677.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-12-16T14-47-35.871677.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T14-47-35.871677.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-12-16T14-47-35.871677.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T14-47-35.871677.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T14-47-35.871677.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T14-47-35.871677.parquet", "**/details_harness|hendrycksTest-management|5_2023-12-16T14-47-35.871677.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-12-16T14-47-35.871677.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T14-47-35.871677.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T14-47-35.871677.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T14-47-35.871677.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T14-47-35.871677.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-12-16T14-47-35.871677.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-12-16T14-47-35.871677.parquet", 
"**/details_harness|hendrycksTest-prehistory|5_2023-12-16T14-47-35.871677.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T14-47-35.871677.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-12-16T14-47-35.871677.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T14-47-35.871677.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T14-47-35.871677.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-12-16T14-47-35.871677.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-12-16T14-47-35.871677.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-12-16T14-47-35.871677.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T14-47-35.871677.parquet", "**/details_harness|hendrycksTest-virology|5_2023-12-16T14-47-35.871677.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-12-16T14-47-35.871677.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_12_16T14_47_35.871677", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T14-47-35.871677.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T14-47-35.871677.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_12_16T14_47_35.871677", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-12-16T14-47-35.871677.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-12-16T14-47-35.871677.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_12_16T14_47_35.871677", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-12-16T14-47-35.871677.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-12-16T14-47-35.871677.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_12_16T14_47_35.871677", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T14-47-35.871677.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T14-47-35.871677.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_12_16T14_47_35.871677", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T14-47-35.871677.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T14-47-35.871677.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_12_16T14_47_35.871677", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-12-16T14-47-35.871677.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-12-16T14-47-35.871677.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_12_16T14_47_35.871677", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T14-47-35.871677.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T14-47-35.871677.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_12_16T14_47_35.871677", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T14-47-35.871677.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T14-47-35.871677.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_12_16T14_47_35.871677", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T14-47-35.871677.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T14-47-35.871677.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_12_16T14_47_35.871677", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T14-47-35.871677.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T14-47-35.871677.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_12_16T14_47_35.871677", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-12-16T14-47-35.871677.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-12-16T14-47-35.871677.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_12_16T14_47_35.871677", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-12-16T14-47-35.871677.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-12-16T14-47-35.871677.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_12_16T14_47_35.871677", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T14-47-35.871677.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T14-47-35.871677.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_12_16T14_47_35.871677", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-12-16T14-47-35.871677.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-12-16T14-47-35.871677.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_12_16T14_47_35.871677", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T14-47-35.871677.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T14-47-35.871677.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_12_16T14_47_35.871677", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T14-47-35.871677.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T14-47-35.871677.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_12_16T14_47_35.871677", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T14-47-35.871677.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T14-47-35.871677.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_12_16T14_47_35.871677", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-12-16T14-47-35.871677.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-12-16T14-47-35.871677.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_12_16T14_47_35.871677", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T14-47-35.871677.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T14-47-35.871677.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_12_16T14_47_35.871677", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T14-47-35.871677.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T14-47-35.871677.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_12_16T14_47_35.871677", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T14-47-35.871677.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T14-47-35.871677.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_12_16T14_47_35.871677", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T14-47-35.871677.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T14-47-35.871677.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_12_16T14_47_35.871677", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T14-47-35.871677.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T14-47-35.871677.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_12_16T14_47_35.871677", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T14-47-35.871677.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T14-47-35.871677.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_12_16T14_47_35.871677", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T14-47-35.871677.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T14-47-35.871677.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_12_16T14_47_35.871677", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T14-47-35.871677.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T14-47-35.871677.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_12_16T14_47_35.871677", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T14-47-35.871677.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T14-47-35.871677.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_12_16T14_47_35.871677", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T14-47-35.871677.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T14-47-35.871677.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_12_16T14_47_35.871677", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T14-47-35.871677.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T14-47-35.871677.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_12_16T14_47_35.871677", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T14-47-35.871677.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T14-47-35.871677.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_12_16T14_47_35.871677", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T14-47-35.871677.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T14-47-35.871677.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_12_16T14_47_35.871677", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T14-47-35.871677.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T14-47-35.871677.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_12_16T14_47_35.871677", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-12-16T14-47-35.871677.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-12-16T14-47-35.871677.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_12_16T14_47_35.871677", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T14-47-35.871677.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T14-47-35.871677.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_12_16T14_47_35.871677", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-12-16T14-47-35.871677.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-12-16T14-47-35.871677.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_12_16T14_47_35.871677", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T14-47-35.871677.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T14-47-35.871677.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_12_16T14_47_35.871677", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T14-47-35.871677.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T14-47-35.871677.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_12_16T14_47_35.871677", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T14-47-35.871677.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T14-47-35.871677.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_12_16T14_47_35.871677", "path": ["**/details_harness|hendrycksTest-management|5_2023-12-16T14-47-35.871677.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-12-16T14-47-35.871677.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_12_16T14_47_35.871677", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-12-16T14-47-35.871677.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-12-16T14-47-35.871677.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_12_16T14_47_35.871677", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T14-47-35.871677.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T14-47-35.871677.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_12_16T14_47_35.871677", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T14-47-35.871677.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T14-47-35.871677.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_12_16T14_47_35.871677", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T14-47-35.871677.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T14-47-35.871677.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_12_16T14_47_35.871677", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T14-47-35.871677.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T14-47-35.871677.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_12_16T14_47_35.871677", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-12-16T14-47-35.871677.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-12-16T14-47-35.871677.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_12_16T14_47_35.871677", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-12-16T14-47-35.871677.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-12-16T14-47-35.871677.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_12_16T14_47_35.871677", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-12-16T14-47-35.871677.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-12-16T14-47-35.871677.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_12_16T14_47_35.871677", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T14-47-35.871677.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T14-47-35.871677.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_12_16T14_47_35.871677", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-12-16T14-47-35.871677.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2023-12-16T14-47-35.871677.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_12_16T14_47_35.871677", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T14-47-35.871677.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T14-47-35.871677.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_12_16T14_47_35.871677", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T14-47-35.871677.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T14-47-35.871677.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_12_16T14_47_35.871677", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-12-16T14-47-35.871677.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-12-16T14-47-35.871677.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_12_16T14_47_35.871677", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-12-16T14-47-35.871677.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-12-16T14-47-35.871677.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_12_16T14_47_35.871677", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-12-16T14-47-35.871677.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-12-16T14-47-35.871677.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_12_16T14_47_35.871677", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T14-47-35.871677.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T14-47-35.871677.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_12_16T14_47_35.871677", "path": ["**/details_harness|hendrycksTest-virology|5_2023-12-16T14-47-35.871677.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-12-16T14-47-35.871677.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_12_16T14_47_35.871677", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-12-16T14-47-35.871677.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-12-16T14-47-35.871677.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_12_16T14_47_35.871677", "path": ["**/details_harness|truthfulqa:mc|0_2023-12-16T14-47-35.871677.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-12-16T14-47-35.871677.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_12_16T14_47_35.871677", "path": ["**/details_harness|winogrande|5_2023-12-16T14-47-35.871677.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-12-16T14-47-35.871677.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_12_16T14_47_35.871677", "path": ["results_2023-12-16T14-47-35.871677.parquet"]}, {"split": "latest", "path": 
["results_2023-12-16T14-47-35.871677.parquet"]}]}]}
2023-12-16T14:51:09+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of rwitz2/go-bruins-v2.1.1 Dataset automatically created during the evaluation run of model rwitz2/go-bruins-v2.1.1 on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-12-16T14:47:35.871677 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
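The flattened summary above mentions per-task details, a "latest" split, and an aggregated "results" configuration, but the accompanying snippet was dropped when the text was flattened. A minimal sketch of those access patterns follows, assuming only the Hugging Face `datasets` library and the configuration and split names listed in the metadata earlier in this record:

```python
from datasets import load_dataset

repo = "open-llm-leaderboard/details_rwitz2__go-bruins-v2.1.1"

# Per-task details: "latest" always points at the most recent run, while the
# timestamped split (e.g. "2023_12_16T14_47_35.871677") pins a specific one.
gsm8k_details = load_dataset(repo, "harness_gsm8k_5", split="latest")

# Aggregated metrics for the whole run live in the "results" configuration.
results = load_dataset(repo, "results", split="latest")

print(gsm8k_details)
print(results)
```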
[ "# Dataset Card for Evaluation run of rwitz2/go-bruins-v2.1.1\n\n\n\nDataset automatically created during the evaluation run of model rwitz2/go-bruins-v2.1.1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-12-16T14:47:35.871677(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of rwitz2/go-bruins-v2.1.1\n\n\n\nDataset automatically created during the evaluation run of model rwitz2/go-bruins-v2.1.1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-12-16T14:47:35.871677(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 183, 67, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of rwitz2/go-bruins-v2.1.1\n\n\n\nDataset automatically created during the evaluation run of model rwitz2/go-bruins-v2.1.1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-12-16T14:47:35.871677(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]## Dataset Card Authors [optional]## Dataset Card Contact" ]
1e0918de0a68cf5f23d17a8d2ee2a39f5c70ef4c
# Dataset Card for Evaluation run of abhinand/tamil-llama-7b-instruct-v0.1 <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [abhinand/tamil-llama-7b-instruct-v0.1](https://huggingface.co/abhinand/tamil-llama-7b-instruct-v0.1) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_abhinand__tamil-llama-7b-instruct-v0.1", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-12-16T14:51:51.361679](https://huggingface.co/datasets/open-llm-leaderboard/details_abhinand__tamil-llama-7b-instruct-v0.1/blob/main/results_2023-12-16T14-51-51.361679.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.4008335001051513, "acc_stderr": 0.034109238722098915, "acc_norm": 0.406154491278963, "acc_norm_stderr": 0.03498024209123229, "mc1": 0.2802937576499388, "mc1_stderr": 0.015723139524608767, "mc2": 0.41698962752686786, "mc2_stderr": 0.014679687695881056 }, "harness|arc:challenge|25": { "acc": 0.43600682593856654, "acc_stderr": 0.014491225699230916, "acc_norm": 0.4803754266211604, "acc_norm_stderr": 0.014600132075947087 }, "harness|hellaswag|10": { "acc": 0.5172276438956384, "acc_stderr": 0.004986818680313444, "acc_norm": 0.7097191794463255, "acc_norm_stderr": 0.004529642828546402 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.4074074074074074, "acc_stderr": 0.04244633238353228, "acc_norm": 0.4074074074074074, "acc_norm_stderr": 0.04244633238353228 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.40131578947368424, "acc_stderr": 0.03988903703336283, "acc_norm": 0.40131578947368424, "acc_norm_stderr": 0.03988903703336283 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.43, "acc_stderr": 0.049756985195624284, "acc_norm": 0.43, "acc_norm_stderr": 0.049756985195624284 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.46037735849056605, "acc_stderr": 0.030676096599389184, "acc_norm": 0.46037735849056605, "acc_norm_stderr": 0.030676096599389184 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.3402777777777778, "acc_stderr": 0.03962135573486219, "acc_norm": 0.3402777777777778, "acc_norm_stderr": 0.03962135573486219 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.22, "acc_stderr": 0.041633319989322695, "acc_norm": 0.22, "acc_norm_stderr": 
0.041633319989322695 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.41, "acc_stderr": 0.04943110704237102, "acc_norm": 0.41, "acc_norm_stderr": 0.04943110704237102 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.3699421965317919, "acc_stderr": 0.0368122963339432, "acc_norm": 0.3699421965317919, "acc_norm_stderr": 0.0368122963339432 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.21568627450980393, "acc_stderr": 0.04092563958237656, "acc_norm": 0.21568627450980393, "acc_norm_stderr": 0.04092563958237656 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.49, "acc_stderr": 0.05024183937956912, "acc_norm": 0.49, "acc_norm_stderr": 0.05024183937956912 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.3574468085106383, "acc_stderr": 0.03132941789476425, "acc_norm": 0.3574468085106383, "acc_norm_stderr": 0.03132941789476425 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.2807017543859649, "acc_stderr": 0.042270544512322004, "acc_norm": 0.2807017543859649, "acc_norm_stderr": 0.042270544512322004 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.41379310344827586, "acc_stderr": 0.04104269211806232, "acc_norm": 0.41379310344827586, "acc_norm_stderr": 0.04104269211806232 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.30158730158730157, "acc_stderr": 0.023636975996101806, "acc_norm": 0.30158730158730157, "acc_norm_stderr": 0.023636975996101806 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.35714285714285715, "acc_stderr": 0.042857142857142816, "acc_norm": 0.35714285714285715, "acc_norm_stderr": 0.042857142857142816 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.34, "acc_stderr": 0.04760952285695236, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695236 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.45806451612903226, "acc_stderr": 0.028343787250540625, "acc_norm": 0.45806451612903226, "acc_norm_stderr": 0.028343787250540625 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.31527093596059114, "acc_stderr": 0.03269080871970187, "acc_norm": 0.31527093596059114, "acc_norm_stderr": 0.03269080871970187 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.32, "acc_stderr": 0.04688261722621504, "acc_norm": 0.32, "acc_norm_stderr": 0.04688261722621504 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.37575757575757573, "acc_stderr": 0.03781887353205983, "acc_norm": 0.37575757575757573, "acc_norm_stderr": 0.03781887353205983 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.4797979797979798, "acc_stderr": 0.03559443565563919, "acc_norm": 0.4797979797979798, "acc_norm_stderr": 0.03559443565563919 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.5803108808290155, "acc_stderr": 0.035615873276858834, "acc_norm": 0.5803108808290155, "acc_norm_stderr": 0.035615873276858834 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.3923076923076923, "acc_stderr": 0.02475600038213095, "acc_norm": 0.3923076923076923, "acc_norm_stderr": 0.02475600038213095 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.24444444444444444, "acc_stderr": 0.02620276653465215, "acc_norm": 0.24444444444444444, "acc_norm_stderr": 0.02620276653465215 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.38235294117647056, "acc_stderr": 0.03156663099215416, "acc_norm": 0.38235294117647056, "acc_norm_stderr": 0.03156663099215416 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 
0.23178807947019867, "acc_stderr": 0.03445406271987054, "acc_norm": 0.23178807947019867, "acc_norm_stderr": 0.03445406271987054 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.5009174311926605, "acc_stderr": 0.021437287056051215, "acc_norm": 0.5009174311926605, "acc_norm_stderr": 0.021437287056051215 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.4305555555555556, "acc_stderr": 0.03376922151252336, "acc_norm": 0.4305555555555556, "acc_norm_stderr": 0.03376922151252336 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.4362745098039216, "acc_stderr": 0.03480693138457038, "acc_norm": 0.4362745098039216, "acc_norm_stderr": 0.03480693138457038 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.5274261603375527, "acc_stderr": 0.032498227183013026, "acc_norm": 0.5274261603375527, "acc_norm_stderr": 0.032498227183013026 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.3721973094170404, "acc_stderr": 0.03244305283008731, "acc_norm": 0.3721973094170404, "acc_norm_stderr": 0.03244305283008731 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.44274809160305345, "acc_stderr": 0.04356447202665069, "acc_norm": 0.44274809160305345, "acc_norm_stderr": 0.04356447202665069 }, "harness|hendrycksTest-international_law|5": { "acc": 0.628099173553719, "acc_stderr": 0.04412015806624504, "acc_norm": 0.628099173553719, "acc_norm_stderr": 0.04412015806624504 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.3611111111111111, "acc_stderr": 0.04643454608906274, "acc_norm": 0.3611111111111111, "acc_norm_stderr": 0.04643454608906274 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.34355828220858897, "acc_stderr": 0.03731133519673893, "acc_norm": 0.34355828220858897, "acc_norm_stderr": 0.03731133519673893 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.25, "acc_stderr": 0.04109974682633932, "acc_norm": 0.25, "acc_norm_stderr": 0.04109974682633932 }, "harness|hendrycksTest-management|5": { "acc": 0.5339805825242718, "acc_stderr": 0.0493929144727348, "acc_norm": 0.5339805825242718, "acc_norm_stderr": 0.0493929144727348 }, "harness|hendrycksTest-marketing|5": { "acc": 0.5470085470085471, "acc_stderr": 0.03261099873098619, "acc_norm": 0.5470085470085471, "acc_norm_stderr": 0.03261099873098619 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.44, "acc_stderr": 0.04988876515698589, "acc_norm": 0.44, "acc_norm_stderr": 0.04988876515698589 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.5146871008939975, "acc_stderr": 0.017872248024429122, "acc_norm": 0.5146871008939975, "acc_norm_stderr": 0.017872248024429122 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.4046242774566474, "acc_stderr": 0.026424816594009852, "acc_norm": 0.4046242774566474, "acc_norm_stderr": 0.026424816594009852 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.23798882681564246, "acc_stderr": 0.014242630070574915, "acc_norm": 0.23798882681564246, "acc_norm_stderr": 0.014242630070574915 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.4542483660130719, "acc_stderr": 0.02850980780262656, "acc_norm": 0.4542483660130719, "acc_norm_stderr": 0.02850980780262656 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.45016077170418006, "acc_stderr": 0.028256660723360187, "acc_norm": 0.45016077170418006, "acc_norm_stderr": 0.028256660723360187 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.4691358024691358, "acc_stderr": 0.027767689606833935, "acc_norm": 0.4691358024691358, "acc_norm_stderr": 0.027767689606833935 }, 
"harness|hendrycksTest-professional_accounting|5": { "acc": 0.2765957446808511, "acc_stderr": 0.026684564340460994, "acc_norm": 0.2765957446808511, "acc_norm_stderr": 0.026684564340460994 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.3246414602346806, "acc_stderr": 0.01195908938853002, "acc_norm": 0.3246414602346806, "acc_norm_stderr": 0.01195908938853002 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.4485294117647059, "acc_stderr": 0.030211479609121593, "acc_norm": 0.4485294117647059, "acc_norm_stderr": 0.030211479609121593 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.369281045751634, "acc_stderr": 0.019524316744866342, "acc_norm": 0.369281045751634, "acc_norm_stderr": 0.019524316744866342 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.37272727272727274, "acc_stderr": 0.04631381319425463, "acc_norm": 0.37272727272727274, "acc_norm_stderr": 0.04631381319425463 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.4163265306122449, "acc_stderr": 0.03155782816556165, "acc_norm": 0.4163265306122449, "acc_norm_stderr": 0.03155782816556165 }, "harness|hendrycksTest-sociology|5": { "acc": 0.4626865671641791, "acc_stderr": 0.03525675167467975, "acc_norm": 0.4626865671641791, "acc_norm_stderr": 0.03525675167467975 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.61, "acc_stderr": 0.04902071300001975, "acc_norm": 0.61, "acc_norm_stderr": 0.04902071300001975 }, "harness|hendrycksTest-virology|5": { "acc": 0.3493975903614458, "acc_stderr": 0.03711725190740751, "acc_norm": 0.3493975903614458, "acc_norm_stderr": 0.03711725190740751 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.6023391812865497, "acc_stderr": 0.0375363895576169, "acc_norm": 0.6023391812865497, "acc_norm_stderr": 0.0375363895576169 }, "harness|truthfulqa:mc|0": { "mc1": 0.2802937576499388, "mc1_stderr": 0.015723139524608767, "mc2": 0.41698962752686786, "mc2_stderr": 0.014679687695881056 }, "harness|winogrande|5": { "acc": 0.7063930544593529, "acc_stderr": 0.012799397296204173 }, "harness|gsm8k|5": { "acc": 0.01819560272934041, "acc_stderr": 0.0036816118940738727 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. 
news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
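The card above describes per-run detail splits plus an aggregated "results" configuration. As a minimal sketch (assuming the Hugging Face `datasets` library is installed; the repository id and the config/split names are taken from this record's metadata, not from the original card), the aggregated metrics for the most recent run could be loaded like this:

```python
from datasets import load_dataset

# Aggregated metrics for this evaluation run; per the config list in the
# record metadata, the "latest" split always tracks the most recent run.
results = load_dataset(
    "open-llm-leaderboard/details_abhinand__tamil-llama-7b-instruct-v0.1",
    "results",
    split="latest",
)
print(results[0])  # one row of aggregated metrics for the run
```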
open-llm-leaderboard/details_abhinand__tamil-llama-7b-instruct-v0.1
[ "region:us" ]
2023-12-16T14:54:44+00:00
{"pretty_name": "Evaluation run of abhinand/tamil-llama-7b-instruct-v0.1", "dataset_summary": "Dataset automatically created during the evaluation run of model [abhinand/tamil-llama-7b-instruct-v0.1](https://huggingface.co/abhinand/tamil-llama-7b-instruct-v0.1) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_abhinand__tamil-llama-7b-instruct-v0.1\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-12-16T14:51:51.361679](https://huggingface.co/datasets/open-llm-leaderboard/details_abhinand__tamil-llama-7b-instruct-v0.1/blob/main/results_2023-12-16T14-51-51.361679.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.4008335001051513,\n \"acc_stderr\": 0.034109238722098915,\n \"acc_norm\": 0.406154491278963,\n \"acc_norm_stderr\": 0.03498024209123229,\n \"mc1\": 0.2802937576499388,\n \"mc1_stderr\": 0.015723139524608767,\n \"mc2\": 0.41698962752686786,\n \"mc2_stderr\": 0.014679687695881056\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.43600682593856654,\n \"acc_stderr\": 0.014491225699230916,\n \"acc_norm\": 0.4803754266211604,\n \"acc_norm_stderr\": 0.014600132075947087\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.5172276438956384,\n \"acc_stderr\": 0.004986818680313444,\n \"acc_norm\": 0.7097191794463255,\n \"acc_norm_stderr\": 0.004529642828546402\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.31,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.31,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.4074074074074074,\n \"acc_stderr\": 0.04244633238353228,\n \"acc_norm\": 0.4074074074074074,\n \"acc_norm_stderr\": 0.04244633238353228\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.40131578947368424,\n \"acc_stderr\": 0.03988903703336283,\n \"acc_norm\": 0.40131578947368424,\n \"acc_norm_stderr\": 0.03988903703336283\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.43,\n \"acc_stderr\": 0.049756985195624284,\n \"acc_norm\": 0.43,\n \"acc_norm_stderr\": 0.049756985195624284\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.46037735849056605,\n \"acc_stderr\": 0.030676096599389184,\n \"acc_norm\": 0.46037735849056605,\n \"acc_norm_stderr\": 0.030676096599389184\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.3402777777777778,\n \"acc_stderr\": 0.03962135573486219,\n \"acc_norm\": 0.3402777777777778,\n \"acc_norm_stderr\": 0.03962135573486219\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.31,\n 
\"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.31,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.22,\n \"acc_stderr\": 0.041633319989322695,\n \"acc_norm\": 0.22,\n \"acc_norm_stderr\": 0.041633319989322695\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.41,\n \"acc_stderr\": 0.04943110704237102,\n \"acc_norm\": 0.41,\n \"acc_norm_stderr\": 0.04943110704237102\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.3699421965317919,\n \"acc_stderr\": 0.0368122963339432,\n \"acc_norm\": 0.3699421965317919,\n \"acc_norm_stderr\": 0.0368122963339432\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.21568627450980393,\n \"acc_stderr\": 0.04092563958237656,\n \"acc_norm\": 0.21568627450980393,\n \"acc_norm_stderr\": 0.04092563958237656\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.49,\n \"acc_stderr\": 0.05024183937956912,\n \"acc_norm\": 0.49,\n \"acc_norm_stderr\": 0.05024183937956912\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.3574468085106383,\n \"acc_stderr\": 0.03132941789476425,\n \"acc_norm\": 0.3574468085106383,\n \"acc_norm_stderr\": 0.03132941789476425\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.2807017543859649,\n \"acc_stderr\": 0.042270544512322004,\n \"acc_norm\": 0.2807017543859649,\n \"acc_norm_stderr\": 0.042270544512322004\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.41379310344827586,\n \"acc_stderr\": 0.04104269211806232,\n \"acc_norm\": 0.41379310344827586,\n \"acc_norm_stderr\": 0.04104269211806232\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.30158730158730157,\n \"acc_stderr\": 0.023636975996101806,\n \"acc_norm\": 0.30158730158730157,\n \"acc_norm_stderr\": 0.023636975996101806\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.35714285714285715,\n \"acc_stderr\": 0.042857142857142816,\n \"acc_norm\": 0.35714285714285715,\n \"acc_norm_stderr\": 0.042857142857142816\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.34,\n \"acc_stderr\": 0.04760952285695236,\n \"acc_norm\": 0.34,\n \"acc_norm_stderr\": 0.04760952285695236\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.45806451612903226,\n \"acc_stderr\": 0.028343787250540625,\n \"acc_norm\": 0.45806451612903226,\n \"acc_norm_stderr\": 0.028343787250540625\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.31527093596059114,\n \"acc_stderr\": 0.03269080871970187,\n \"acc_norm\": 0.31527093596059114,\n \"acc_norm_stderr\": 0.03269080871970187\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.32,\n \"acc_stderr\": 0.04688261722621504,\n \"acc_norm\": 0.32,\n \"acc_norm_stderr\": 0.04688261722621504\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.37575757575757573,\n \"acc_stderr\": 0.03781887353205983,\n \"acc_norm\": 0.37575757575757573,\n \"acc_norm_stderr\": 0.03781887353205983\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.4797979797979798,\n \"acc_stderr\": 0.03559443565563919,\n \"acc_norm\": 0.4797979797979798,\n \"acc_norm_stderr\": 0.03559443565563919\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.5803108808290155,\n \"acc_stderr\": 0.035615873276858834,\n \"acc_norm\": 0.5803108808290155,\n \"acc_norm_stderr\": 0.035615873276858834\n },\n 
\"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.3923076923076923,\n \"acc_stderr\": 0.02475600038213095,\n \"acc_norm\": 0.3923076923076923,\n \"acc_norm_stderr\": 0.02475600038213095\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.24444444444444444,\n \"acc_stderr\": 0.02620276653465215,\n \"acc_norm\": 0.24444444444444444,\n \"acc_norm_stderr\": 0.02620276653465215\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.38235294117647056,\n \"acc_stderr\": 0.03156663099215416,\n \"acc_norm\": 0.38235294117647056,\n \"acc_norm_stderr\": 0.03156663099215416\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.23178807947019867,\n \"acc_stderr\": 0.03445406271987054,\n \"acc_norm\": 0.23178807947019867,\n \"acc_norm_stderr\": 0.03445406271987054\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.5009174311926605,\n \"acc_stderr\": 0.021437287056051215,\n \"acc_norm\": 0.5009174311926605,\n \"acc_norm_stderr\": 0.021437287056051215\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.4305555555555556,\n \"acc_stderr\": 0.03376922151252336,\n \"acc_norm\": 0.4305555555555556,\n \"acc_norm_stderr\": 0.03376922151252336\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.4362745098039216,\n \"acc_stderr\": 0.03480693138457038,\n \"acc_norm\": 0.4362745098039216,\n \"acc_norm_stderr\": 0.03480693138457038\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.5274261603375527,\n \"acc_stderr\": 0.032498227183013026,\n \"acc_norm\": 0.5274261603375527,\n \"acc_norm_stderr\": 0.032498227183013026\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.3721973094170404,\n \"acc_stderr\": 0.03244305283008731,\n \"acc_norm\": 0.3721973094170404,\n \"acc_norm_stderr\": 0.03244305283008731\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.44274809160305345,\n \"acc_stderr\": 0.04356447202665069,\n \"acc_norm\": 0.44274809160305345,\n \"acc_norm_stderr\": 0.04356447202665069\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.628099173553719,\n \"acc_stderr\": 0.04412015806624504,\n \"acc_norm\": 0.628099173553719,\n \"acc_norm_stderr\": 0.04412015806624504\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.3611111111111111,\n \"acc_stderr\": 0.04643454608906274,\n \"acc_norm\": 0.3611111111111111,\n \"acc_norm_stderr\": 0.04643454608906274\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.34355828220858897,\n \"acc_stderr\": 0.03731133519673893,\n \"acc_norm\": 0.34355828220858897,\n \"acc_norm_stderr\": 0.03731133519673893\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.25,\n \"acc_stderr\": 0.04109974682633932,\n \"acc_norm\": 0.25,\n \"acc_norm_stderr\": 0.04109974682633932\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.5339805825242718,\n \"acc_stderr\": 0.0493929144727348,\n \"acc_norm\": 0.5339805825242718,\n \"acc_norm_stderr\": 0.0493929144727348\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.5470085470085471,\n \"acc_stderr\": 0.03261099873098619,\n \"acc_norm\": 0.5470085470085471,\n \"acc_norm_stderr\": 0.03261099873098619\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.44,\n \"acc_stderr\": 0.04988876515698589,\n \"acc_norm\": 0.44,\n \"acc_norm_stderr\": 0.04988876515698589\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.5146871008939975,\n \"acc_stderr\": 
0.017872248024429122,\n \"acc_norm\": 0.5146871008939975,\n \"acc_norm_stderr\": 0.017872248024429122\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.4046242774566474,\n \"acc_stderr\": 0.026424816594009852,\n \"acc_norm\": 0.4046242774566474,\n \"acc_norm_stderr\": 0.026424816594009852\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.23798882681564246,\n \"acc_stderr\": 0.014242630070574915,\n \"acc_norm\": 0.23798882681564246,\n \"acc_norm_stderr\": 0.014242630070574915\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.4542483660130719,\n \"acc_stderr\": 0.02850980780262656,\n \"acc_norm\": 0.4542483660130719,\n \"acc_norm_stderr\": 0.02850980780262656\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.45016077170418006,\n \"acc_stderr\": 0.028256660723360187,\n \"acc_norm\": 0.45016077170418006,\n \"acc_norm_stderr\": 0.028256660723360187\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.4691358024691358,\n \"acc_stderr\": 0.027767689606833935,\n \"acc_norm\": 0.4691358024691358,\n \"acc_norm_stderr\": 0.027767689606833935\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.2765957446808511,\n \"acc_stderr\": 0.026684564340460994,\n \"acc_norm\": 0.2765957446808511,\n \"acc_norm_stderr\": 0.026684564340460994\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.3246414602346806,\n \"acc_stderr\": 0.01195908938853002,\n \"acc_norm\": 0.3246414602346806,\n \"acc_norm_stderr\": 0.01195908938853002\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.4485294117647059,\n \"acc_stderr\": 0.030211479609121593,\n \"acc_norm\": 0.4485294117647059,\n \"acc_norm_stderr\": 0.030211479609121593\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.369281045751634,\n \"acc_stderr\": 0.019524316744866342,\n \"acc_norm\": 0.369281045751634,\n \"acc_norm_stderr\": 0.019524316744866342\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.37272727272727274,\n \"acc_stderr\": 0.04631381319425463,\n \"acc_norm\": 0.37272727272727274,\n \"acc_norm_stderr\": 0.04631381319425463\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.4163265306122449,\n \"acc_stderr\": 0.03155782816556165,\n \"acc_norm\": 0.4163265306122449,\n \"acc_norm_stderr\": 0.03155782816556165\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.4626865671641791,\n \"acc_stderr\": 0.03525675167467975,\n \"acc_norm\": 0.4626865671641791,\n \"acc_norm_stderr\": 0.03525675167467975\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.61,\n \"acc_stderr\": 0.04902071300001975,\n \"acc_norm\": 0.61,\n \"acc_norm_stderr\": 0.04902071300001975\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.3493975903614458,\n \"acc_stderr\": 0.03711725190740751,\n \"acc_norm\": 0.3493975903614458,\n \"acc_norm_stderr\": 0.03711725190740751\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.6023391812865497,\n \"acc_stderr\": 0.0375363895576169,\n \"acc_norm\": 0.6023391812865497,\n \"acc_norm_stderr\": 0.0375363895576169\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.2802937576499388,\n \"mc1_stderr\": 0.015723139524608767,\n \"mc2\": 0.41698962752686786,\n \"mc2_stderr\": 0.014679687695881056\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7063930544593529,\n \"acc_stderr\": 0.012799397296204173\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.01819560272934041,\n \"acc_stderr\": 0.0036816118940738727\n }\n}\n```", "repo_url": 
"https://huggingface.co/abhinand/tamil-llama-7b-instruct-v0.1", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_12_16T14_51_51.361679", "path": ["**/details_harness|arc:challenge|25_2023-12-16T14-51-51.361679.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-12-16T14-51-51.361679.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_12_16T14_51_51.361679", "path": ["**/details_harness|gsm8k|5_2023-12-16T14-51-51.361679.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-12-16T14-51-51.361679.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_12_16T14_51_51.361679", "path": ["**/details_harness|hellaswag|10_2023-12-16T14-51-51.361679.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-12-16T14-51-51.361679.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_12_16T14_51_51.361679", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T14-51-51.361679.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-12-16T14-51-51.361679.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-12-16T14-51-51.361679.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T14-51-51.361679.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T14-51-51.361679.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-12-16T14-51-51.361679.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T14-51-51.361679.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T14-51-51.361679.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T14-51-51.361679.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T14-51-51.361679.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-12-16T14-51-51.361679.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-12-16T14-51-51.361679.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T14-51-51.361679.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-12-16T14-51-51.361679.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T14-51-51.361679.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T14-51-51.361679.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T14-51-51.361679.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-12-16T14-51-51.361679.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T14-51-51.361679.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T14-51-51.361679.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T14-51-51.361679.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T14-51-51.361679.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T14-51-51.361679.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T14-51-51.361679.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T14-51-51.361679.parquet", 
"**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T14-51-51.361679.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T14-51-51.361679.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T14-51-51.361679.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T14-51-51.361679.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T14-51-51.361679.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T14-51-51.361679.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T14-51-51.361679.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-12-16T14-51-51.361679.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T14-51-51.361679.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-12-16T14-51-51.361679.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T14-51-51.361679.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T14-51-51.361679.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T14-51-51.361679.parquet", "**/details_harness|hendrycksTest-management|5_2023-12-16T14-51-51.361679.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-12-16T14-51-51.361679.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T14-51-51.361679.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T14-51-51.361679.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T14-51-51.361679.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T14-51-51.361679.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-12-16T14-51-51.361679.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-12-16T14-51-51.361679.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-12-16T14-51-51.361679.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T14-51-51.361679.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-12-16T14-51-51.361679.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T14-51-51.361679.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T14-51-51.361679.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-12-16T14-51-51.361679.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-12-16T14-51-51.361679.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-12-16T14-51-51.361679.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T14-51-51.361679.parquet", "**/details_harness|hendrycksTest-virology|5_2023-12-16T14-51-51.361679.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-12-16T14-51-51.361679.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T14-51-51.361679.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-12-16T14-51-51.361679.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-12-16T14-51-51.361679.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T14-51-51.361679.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T14-51-51.361679.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-12-16T14-51-51.361679.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T14-51-51.361679.parquet", 
"**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T14-51-51.361679.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T14-51-51.361679.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T14-51-51.361679.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-12-16T14-51-51.361679.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-12-16T14-51-51.361679.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T14-51-51.361679.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-12-16T14-51-51.361679.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T14-51-51.361679.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T14-51-51.361679.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T14-51-51.361679.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-12-16T14-51-51.361679.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T14-51-51.361679.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T14-51-51.361679.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T14-51-51.361679.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T14-51-51.361679.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T14-51-51.361679.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T14-51-51.361679.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T14-51-51.361679.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T14-51-51.361679.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T14-51-51.361679.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T14-51-51.361679.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T14-51-51.361679.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T14-51-51.361679.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T14-51-51.361679.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T14-51-51.361679.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-12-16T14-51-51.361679.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T14-51-51.361679.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-12-16T14-51-51.361679.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T14-51-51.361679.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T14-51-51.361679.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T14-51-51.361679.parquet", "**/details_harness|hendrycksTest-management|5_2023-12-16T14-51-51.361679.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-12-16T14-51-51.361679.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T14-51-51.361679.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T14-51-51.361679.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T14-51-51.361679.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T14-51-51.361679.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-12-16T14-51-51.361679.parquet", 
"**/details_harness|hendrycksTest-philosophy|5_2023-12-16T14-51-51.361679.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-12-16T14-51-51.361679.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T14-51-51.361679.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-12-16T14-51-51.361679.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T14-51-51.361679.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T14-51-51.361679.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-12-16T14-51-51.361679.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-12-16T14-51-51.361679.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-12-16T14-51-51.361679.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T14-51-51.361679.parquet", "**/details_harness|hendrycksTest-virology|5_2023-12-16T14-51-51.361679.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-12-16T14-51-51.361679.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_12_16T14_51_51.361679", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T14-51-51.361679.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T14-51-51.361679.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_12_16T14_51_51.361679", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-12-16T14-51-51.361679.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-12-16T14-51-51.361679.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_12_16T14_51_51.361679", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-12-16T14-51-51.361679.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-12-16T14-51-51.361679.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_12_16T14_51_51.361679", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T14-51-51.361679.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T14-51-51.361679.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_12_16T14_51_51.361679", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T14-51-51.361679.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T14-51-51.361679.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_12_16T14_51_51.361679", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-12-16T14-51-51.361679.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-12-16T14-51-51.361679.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_12_16T14_51_51.361679", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T14-51-51.361679.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T14-51-51.361679.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_12_16T14_51_51.361679", "path": 
["**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T14-51-51.361679.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T14-51-51.361679.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_12_16T14_51_51.361679", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T14-51-51.361679.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T14-51-51.361679.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_12_16T14_51_51.361679", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T14-51-51.361679.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T14-51-51.361679.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_12_16T14_51_51.361679", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-12-16T14-51-51.361679.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-12-16T14-51-51.361679.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_12_16T14_51_51.361679", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-12-16T14-51-51.361679.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-12-16T14-51-51.361679.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_12_16T14_51_51.361679", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T14-51-51.361679.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T14-51-51.361679.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_12_16T14_51_51.361679", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-12-16T14-51-51.361679.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-12-16T14-51-51.361679.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_12_16T14_51_51.361679", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T14-51-51.361679.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T14-51-51.361679.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_12_16T14_51_51.361679", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T14-51-51.361679.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T14-51-51.361679.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_12_16T14_51_51.361679", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T14-51-51.361679.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T14-51-51.361679.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_12_16T14_51_51.361679", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-12-16T14-51-51.361679.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-global_facts|5_2023-12-16T14-51-51.361679.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_12_16T14_51_51.361679", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T14-51-51.361679.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T14-51-51.361679.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_12_16T14_51_51.361679", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T14-51-51.361679.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T14-51-51.361679.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_12_16T14_51_51.361679", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T14-51-51.361679.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T14-51-51.361679.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_12_16T14_51_51.361679", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T14-51-51.361679.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T14-51-51.361679.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_12_16T14_51_51.361679", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T14-51-51.361679.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T14-51-51.361679.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_12_16T14_51_51.361679", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T14-51-51.361679.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T14-51-51.361679.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_12_16T14_51_51.361679", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T14-51-51.361679.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T14-51-51.361679.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_12_16T14_51_51.361679", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T14-51-51.361679.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T14-51-51.361679.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_12_16T14_51_51.361679", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T14-51-51.361679.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T14-51-51.361679.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_12_16T14_51_51.361679", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T14-51-51.361679.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T14-51-51.361679.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_12_16T14_51_51.361679", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T14-51-51.361679.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T14-51-51.361679.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_12_16T14_51_51.361679", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T14-51-51.361679.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T14-51-51.361679.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_12_16T14_51_51.361679", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T14-51-51.361679.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T14-51-51.361679.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_12_16T14_51_51.361679", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T14-51-51.361679.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T14-51-51.361679.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_12_16T14_51_51.361679", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-12-16T14-51-51.361679.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-12-16T14-51-51.361679.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_12_16T14_51_51.361679", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T14-51-51.361679.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T14-51-51.361679.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_12_16T14_51_51.361679", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-12-16T14-51-51.361679.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-12-16T14-51-51.361679.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_12_16T14_51_51.361679", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T14-51-51.361679.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T14-51-51.361679.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_12_16T14_51_51.361679", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T14-51-51.361679.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T14-51-51.361679.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_12_16T14_51_51.361679", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T14-51-51.361679.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T14-51-51.361679.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_12_16T14_51_51.361679", "path": ["**/details_harness|hendrycksTest-management|5_2023-12-16T14-51-51.361679.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-12-16T14-51-51.361679.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_12_16T14_51_51.361679", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-12-16T14-51-51.361679.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-12-16T14-51-51.361679.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_12_16T14_51_51.361679", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T14-51-51.361679.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T14-51-51.361679.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_12_16T14_51_51.361679", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T14-51-51.361679.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T14-51-51.361679.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_12_16T14_51_51.361679", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T14-51-51.361679.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T14-51-51.361679.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_12_16T14_51_51.361679", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T14-51-51.361679.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T14-51-51.361679.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_12_16T14_51_51.361679", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-12-16T14-51-51.361679.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-12-16T14-51-51.361679.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_12_16T14_51_51.361679", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-12-16T14-51-51.361679.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-12-16T14-51-51.361679.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_12_16T14_51_51.361679", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-12-16T14-51-51.361679.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-12-16T14-51-51.361679.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_12_16T14_51_51.361679", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T14-51-51.361679.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T14-51-51.361679.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_12_16T14_51_51.361679", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2023-12-16T14-51-51.361679.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-12-16T14-51-51.361679.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_12_16T14_51_51.361679", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T14-51-51.361679.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T14-51-51.361679.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_12_16T14_51_51.361679", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T14-51-51.361679.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T14-51-51.361679.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_12_16T14_51_51.361679", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-12-16T14-51-51.361679.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-12-16T14-51-51.361679.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_12_16T14_51_51.361679", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-12-16T14-51-51.361679.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-12-16T14-51-51.361679.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_12_16T14_51_51.361679", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-12-16T14-51-51.361679.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-12-16T14-51-51.361679.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_12_16T14_51_51.361679", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T14-51-51.361679.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T14-51-51.361679.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_12_16T14_51_51.361679", "path": ["**/details_harness|hendrycksTest-virology|5_2023-12-16T14-51-51.361679.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-12-16T14-51-51.361679.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_12_16T14_51_51.361679", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-12-16T14-51-51.361679.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-12-16T14-51-51.361679.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_12_16T14_51_51.361679", "path": ["**/details_harness|truthfulqa:mc|0_2023-12-16T14-51-51.361679.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-12-16T14-51-51.361679.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_12_16T14_51_51.361679", "path": ["**/details_harness|winogrande|5_2023-12-16T14-51-51.361679.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-12-16T14-51-51.361679.parquet"]}]}, {"config_name": "results", "data_files": [{"split": 
"2023_12_16T14_51_51.361679", "path": ["results_2023-12-16T14-51-51.361679.parquet"]}, {"split": "latest", "path": ["results_2023-12-16T14-51-51.361679.parquet"]}]}]}
2023-12-16T14:55:25+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of abhinand/tamil-llama-7b-instruct-v0.1 Dataset automatically created during the evaluation run of model abhinand/tamil-llama-7b-instruct-v0.1 on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-12-16T14:51:51.361679 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
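As a complement to the loading instructions referenced in the card text above, a minimal sketch (assuming the Hugging Face `datasets` library; the config name and the timestamped split are taken from this record's metadata) that pins a single task to the timestamped split of this run rather than the moving "latest" split:

```python
from datasets import load_dataset

# Per-sample details for one task of this specific run; the timestamped split
# pins the run, whereas "latest" would follow whichever run is most recent.
gsm8k_details = load_dataset(
    "open-llm-leaderboard/details_abhinand__tamil-llama-7b-instruct-v0.1",
    "harness_gsm8k_5",
    split="2023_12_16T14_51_51.361679",
)
print(gsm8k_details)
```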
[ "# Dataset Card for Evaluation run of abhinand/tamil-llama-7b-instruct-v0.1\n\n\n\nDataset automatically created during the evaluation run of model abhinand/tamil-llama-7b-instruct-v0.1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-12-16T14:51:51.361679(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of abhinand/tamil-llama-7b-instruct-v0.1\n\n\n\nDataset automatically created during the evaluation run of model abhinand/tamil-llama-7b-instruct-v0.1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-12-16T14:51:51.361679(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 193, 66, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of abhinand/tamil-llama-7b-instruct-v0.1\n\n\n\nDataset automatically created during the evaluation run of model abhinand/tamil-llama-7b-instruct-v0.1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-12-16T14:51:51.361679(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]## Dataset Card Authors [optional]" ]
a0df8370228a9391fb335de85cb3ff7c23663647
# Dataset Card for Evaluation run of teilomillet/MiniMerlin-3B

<!-- Provide a quick summary of the dataset. -->

Dataset automatically created during the evaluation run of model [teilomillet/MiniMerlin-3B](https://huggingface.co/teilomillet/MiniMerlin-3B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).

The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).

To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_teilomillet__MiniMerlin-3B",
	"harness_winogrande_5",
	split="train")
```

## Latest results

These are the [latest results from run 2023-12-16T15:00:53.060575](https://huggingface.co/datasets/open-llm-leaderboard/details_teilomillet__MiniMerlin-3B/blob/main/results_2023-12-16T15-00-53.060575.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval):

```python
{
    "all": {
        "acc": 0.43207336851472694,
        "acc_stderr": 0.03462433250134428,
        "acc_norm": 0.4362304045175937,
        "acc_norm_stderr": 0.035382366587976055,
        "mc1": 0.28886168910648713,
        "mc1_stderr": 0.015866346401384308,
        "mc2": 0.4706617297468041,
        "mc2_stderr": 0.015095071415919299
    },
    "harness|arc:challenge|25": {
        "acc": 0.38993174061433444,
        "acc_stderr": 0.014252959848892882,
        "acc_norm": 0.44368600682593856,
        "acc_norm_stderr": 0.014518421825670449
    },
    "harness|hellaswag|10": {
        "acc": 0.492531368253336,
        "acc_stderr": 0.004989224715784537,
        "acc_norm": 0.6656044612626967,
        "acc_norm_stderr": 0.0047081453934113915
    },
    "harness|hendrycksTest-abstract_algebra|5": {
        "acc": 0.27,
        "acc_stderr": 0.0446196043338474,
        "acc_norm": 0.27,
        "acc_norm_stderr": 0.0446196043338474
    },
    "harness|hendrycksTest-anatomy|5": {
        "acc": 0.4222222222222222,
        "acc_stderr": 0.04266763404099582,
        "acc_norm": 0.4222222222222222,
        "acc_norm_stderr": 0.04266763404099582
    },
    "harness|hendrycksTest-astronomy|5": {
        "acc": 0.4342105263157895,
        "acc_stderr": 0.04033565667848319,
        "acc_norm": 0.4342105263157895,
        "acc_norm_stderr": 0.04033565667848319
    },
    "harness|hendrycksTest-business_ethics|5": {
        "acc": 0.4,
        "acc_stderr": 0.04923659639173309,
        "acc_norm": 0.4,
        "acc_norm_stderr": 0.04923659639173309
    },
    "harness|hendrycksTest-clinical_knowledge|5": {
        "acc": 0.5132075471698113,
        "acc_stderr": 0.03076213487450047,
        "acc_norm": 0.5132075471698113,
        "acc_norm_stderr": 0.03076213487450047
    },
    "harness|hendrycksTest-college_biology|5": {
        "acc": 0.4444444444444444,
        "acc_stderr": 0.04155319955593146,
        "acc_norm": 0.4444444444444444,
        "acc_norm_stderr": 0.04155319955593146
    },
    "harness|hendrycksTest-college_chemistry|5": {
        "acc": 0.42,
        "acc_stderr": 0.049604496374885836,
        "acc_norm": 0.42,
        "acc_norm_stderr": 0.049604496374885836
    },
    "harness|hendrycksTest-college_computer_science|5": {
        "acc": 0.46,
        "acc_stderr": 0.05009082659620332,
        "acc_norm": 0.46,
        "acc_norm_stderr": 0.05009082659620332
    },
    "harness|hendrycksTest-college_mathematics|5": {
"acc": 0.34, "acc_stderr": 0.04760952285695235, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695235 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.3988439306358382, "acc_stderr": 0.037336266553835096, "acc_norm": 0.3988439306358382, "acc_norm_stderr": 0.037336266553835096 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.27450980392156865, "acc_stderr": 0.04440521906179326, "acc_norm": 0.27450980392156865, "acc_norm_stderr": 0.04440521906179326 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.53, "acc_stderr": 0.05016135580465919, "acc_norm": 0.53, "acc_norm_stderr": 0.05016135580465919 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.3574468085106383, "acc_stderr": 0.03132941789476425, "acc_norm": 0.3574468085106383, "acc_norm_stderr": 0.03132941789476425 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.2631578947368421, "acc_stderr": 0.041424397194893624, "acc_norm": 0.2631578947368421, "acc_norm_stderr": 0.041424397194893624 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.4068965517241379, "acc_stderr": 0.040937939812662374, "acc_norm": 0.4068965517241379, "acc_norm_stderr": 0.040937939812662374 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.2698412698412698, "acc_stderr": 0.02286083830923207, "acc_norm": 0.2698412698412698, "acc_norm_stderr": 0.02286083830923207 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.30158730158730157, "acc_stderr": 0.04104947269903394, "acc_norm": 0.30158730158730157, "acc_norm_stderr": 0.04104947269903394 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.25, "acc_stderr": 0.04351941398892446, "acc_norm": 0.25, "acc_norm_stderr": 0.04351941398892446 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.5032258064516129, "acc_stderr": 0.028443414226438323, "acc_norm": 0.5032258064516129, "acc_norm_stderr": 0.028443414226438323 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.3891625615763547, "acc_stderr": 0.03430462416103872, "acc_norm": 0.3891625615763547, "acc_norm_stderr": 0.03430462416103872 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.43, "acc_stderr": 0.04975698519562428, "acc_norm": 0.43, "acc_norm_stderr": 0.04975698519562428 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.5212121212121212, "acc_stderr": 0.03900828913737302, "acc_norm": 0.5212121212121212, "acc_norm_stderr": 0.03900828913737302 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.5252525252525253, "acc_stderr": 0.03557806245087314, "acc_norm": 0.5252525252525253, "acc_norm_stderr": 0.03557806245087314 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.5854922279792746, "acc_stderr": 0.03555300319557668, "acc_norm": 0.5854922279792746, "acc_norm_stderr": 0.03555300319557668 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.4230769230769231, "acc_stderr": 0.02504919787604235, "acc_norm": 0.4230769230769231, "acc_norm_stderr": 0.02504919787604235 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.25555555555555554, "acc_stderr": 0.026593939101844072, "acc_norm": 0.25555555555555554, "acc_norm_stderr": 0.026593939101844072 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.3949579831932773, "acc_stderr": 0.031753678460966245, "acc_norm": 0.3949579831932773, "acc_norm_stderr": 0.031753678460966245 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.3576158940397351, "acc_stderr": 0.03913453431177258, "acc_norm": 0.3576158940397351, 
"acc_norm_stderr": 0.03913453431177258 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.5908256880733945, "acc_stderr": 0.02108067026443373, "acc_norm": 0.5908256880733945, "acc_norm_stderr": 0.02108067026443373 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.3472222222222222, "acc_stderr": 0.032468872436376486, "acc_norm": 0.3472222222222222, "acc_norm_stderr": 0.032468872436376486 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.5196078431372549, "acc_stderr": 0.03506612560524866, "acc_norm": 0.5196078431372549, "acc_norm_stderr": 0.03506612560524866 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.5822784810126582, "acc_stderr": 0.032103530322412685, "acc_norm": 0.5822784810126582, "acc_norm_stderr": 0.032103530322412685 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.39461883408071746, "acc_stderr": 0.03280400504755291, "acc_norm": 0.39461883408071746, "acc_norm_stderr": 0.03280400504755291 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.5114503816793893, "acc_stderr": 0.043841400240780176, "acc_norm": 0.5114503816793893, "acc_norm_stderr": 0.043841400240780176 }, "harness|hendrycksTest-international_law|5": { "acc": 0.5867768595041323, "acc_stderr": 0.04495087843548408, "acc_norm": 0.5867768595041323, "acc_norm_stderr": 0.04495087843548408 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.46296296296296297, "acc_stderr": 0.04820403072760628, "acc_norm": 0.46296296296296297, "acc_norm_stderr": 0.04820403072760628 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.4539877300613497, "acc_stderr": 0.0391170190467718, "acc_norm": 0.4539877300613497, "acc_norm_stderr": 0.0391170190467718 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.2857142857142857, "acc_stderr": 0.042878587513404565, "acc_norm": 0.2857142857142857, "acc_norm_stderr": 0.042878587513404565 }, "harness|hendrycksTest-management|5": { "acc": 0.5728155339805825, "acc_stderr": 0.04897957737781168, "acc_norm": 0.5728155339805825, "acc_norm_stderr": 0.04897957737781168 }, "harness|hendrycksTest-marketing|5": { "acc": 0.6623931623931624, "acc_stderr": 0.030980296992618558, "acc_norm": 0.6623931623931624, "acc_norm_stderr": 0.030980296992618558 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.47, "acc_stderr": 0.05016135580465919, "acc_norm": 0.47, "acc_norm_stderr": 0.05016135580465919 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.4955300127713921, "acc_stderr": 0.017879248970584384, "acc_norm": 0.4955300127713921, "acc_norm_stderr": 0.017879248970584384 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.45664739884393063, "acc_stderr": 0.026817718130348923, "acc_norm": 0.45664739884393063, "acc_norm_stderr": 0.026817718130348923 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.23128491620111732, "acc_stderr": 0.014102223623152579, "acc_norm": 0.23128491620111732, "acc_norm_stderr": 0.014102223623152579 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.5228758169934641, "acc_stderr": 0.028599936776089768, "acc_norm": 0.5228758169934641, "acc_norm_stderr": 0.028599936776089768 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.4662379421221865, "acc_stderr": 0.02833327710956278, "acc_norm": 0.4662379421221865, "acc_norm_stderr": 0.02833327710956278 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.4444444444444444, "acc_stderr": 0.027648477877413327, "acc_norm": 0.4444444444444444, "acc_norm_stderr": 0.027648477877413327 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 
0.3120567375886525, "acc_stderr": 0.027640120545169927, "acc_norm": 0.3120567375886525, "acc_norm_stderr": 0.027640120545169927 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.3272490221642764, "acc_stderr": 0.011983819806464737, "acc_norm": 0.3272490221642764, "acc_norm_stderr": 0.011983819806464737 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.33088235294117646, "acc_stderr": 0.02858270975389844, "acc_norm": 0.33088235294117646, "acc_norm_stderr": 0.02858270975389844 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.34967320261437906, "acc_stderr": 0.019291961895066368, "acc_norm": 0.34967320261437906, "acc_norm_stderr": 0.019291961895066368 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.4909090909090909, "acc_stderr": 0.04788339768702862, "acc_norm": 0.4909090909090909, "acc_norm_stderr": 0.04788339768702862 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.5142857142857142, "acc_stderr": 0.03199615232806286, "acc_norm": 0.5142857142857142, "acc_norm_stderr": 0.03199615232806286 }, "harness|hendrycksTest-sociology|5": { "acc": 0.5771144278606966, "acc_stderr": 0.034932317774212816, "acc_norm": 0.5771144278606966, "acc_norm_stderr": 0.034932317774212816 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.62, "acc_stderr": 0.04878317312145632, "acc_norm": 0.62, "acc_norm_stderr": 0.04878317312145632 }, "harness|hendrycksTest-virology|5": { "acc": 0.4036144578313253, "acc_stderr": 0.038194861407583984, "acc_norm": 0.4036144578313253, "acc_norm_stderr": 0.038194861407583984 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.5029239766081871, "acc_stderr": 0.03834759370936839, "acc_norm": 0.5029239766081871, "acc_norm_stderr": 0.03834759370936839 }, "harness|truthfulqa:mc|0": { "mc1": 0.28886168910648713, "mc1_stderr": 0.015866346401384308, "mc2": 0.4706617297468041, "mc2_stderr": 0.015095071415919299 }, "harness|winogrande|5": { "acc": 0.6440410418310971, "acc_stderr": 0.013456740656273964 }, "harness|gsm8k|5": { "acc": 0.20166793025018953, "acc_stderr": 0.011052295889544378 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). 
--> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
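## Loading the aggregated results

The "results" configuration described above stores the aggregated metrics for every run, and each per-task configuration exposes a "latest" split alongside the timestamped ones. The snippet below is a minimal sketch of how you might inspect them; the configuration names ("results", "harness_gsm8k_5") and the "latest" split come from this card's configuration list, while the exact column layout of the returned datasets is not guaranteed and may differ between runs.

```python
from datasets import load_dataset

REPO = "open-llm-leaderboard/details_teilomillet__MiniMerlin-3B"

# Aggregated metrics of the run; the "latest" split always points to the
# most recent evaluation (split names otherwise use the run timestamp).
results = load_dataset(REPO, "results", split="latest")

# Per-sample details for a single task, e.g. the 5-shot GSM8K harness
# configuration listed in this dataset's configs.
gsm8k_details = load_dataset(REPO, "harness_gsm8k_5", split="latest")

print(results)
print(gsm8k_details[0])  # first evaluated example of the latest GSM8K run
```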
open-llm-leaderboard/details_teilomillet__MiniMerlin-3B
[ "region:us" ]
2023-12-16T15:03:48+00:00
{"pretty_name": "Evaluation run of teilomillet/MiniMerlin-3B", "dataset_summary": "Dataset automatically created during the evaluation run of model [teilomillet/MiniMerlin-3B](https://huggingface.co/teilomillet/MiniMerlin-3B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_teilomillet__MiniMerlin-3B\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-12-16T15:00:53.060575](https://huggingface.co/datasets/open-llm-leaderboard/details_teilomillet__MiniMerlin-3B/blob/main/results_2023-12-16T15-00-53.060575.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.43207336851472694,\n \"acc_stderr\": 0.03462433250134428,\n \"acc_norm\": 0.4362304045175937,\n \"acc_norm_stderr\": 0.035382366587976055,\n \"mc1\": 0.28886168910648713,\n \"mc1_stderr\": 0.015866346401384308,\n \"mc2\": 0.4706617297468041,\n \"mc2_stderr\": 0.015095071415919299\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.38993174061433444,\n \"acc_stderr\": 0.014252959848892882,\n \"acc_norm\": 0.44368600682593856,\n \"acc_norm_stderr\": 0.014518421825670449\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.492531368253336,\n \"acc_stderr\": 0.004989224715784537,\n \"acc_norm\": 0.6656044612626967,\n \"acc_norm_stderr\": 0.0047081453934113915\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.27,\n \"acc_stderr\": 0.0446196043338474,\n \"acc_norm\": 0.27,\n \"acc_norm_stderr\": 0.0446196043338474\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.4222222222222222,\n \"acc_stderr\": 0.04266763404099582,\n \"acc_norm\": 0.4222222222222222,\n \"acc_norm_stderr\": 0.04266763404099582\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.4342105263157895,\n \"acc_stderr\": 0.04033565667848319,\n \"acc_norm\": 0.4342105263157895,\n \"acc_norm_stderr\": 0.04033565667848319\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.4,\n \"acc_stderr\": 0.04923659639173309,\n \"acc_norm\": 0.4,\n \"acc_norm_stderr\": 0.04923659639173309\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.5132075471698113,\n \"acc_stderr\": 0.03076213487450047,\n \"acc_norm\": 0.5132075471698113,\n \"acc_norm_stderr\": 0.03076213487450047\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.4444444444444444,\n \"acc_stderr\": 0.04155319955593146,\n \"acc_norm\": 0.4444444444444444,\n \"acc_norm_stderr\": 0.04155319955593146\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.42,\n \"acc_stderr\": 0.049604496374885836,\n \"acc_norm\": 0.42,\n 
\"acc_norm_stderr\": 0.049604496374885836\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.46,\n \"acc_stderr\": 0.05009082659620332,\n \"acc_norm\": 0.46,\n \"acc_norm_stderr\": 0.05009082659620332\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.34,\n \"acc_stderr\": 0.04760952285695235,\n \"acc_norm\": 0.34,\n \"acc_norm_stderr\": 0.04760952285695235\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.3988439306358382,\n \"acc_stderr\": 0.037336266553835096,\n \"acc_norm\": 0.3988439306358382,\n \"acc_norm_stderr\": 0.037336266553835096\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.27450980392156865,\n \"acc_stderr\": 0.04440521906179326,\n \"acc_norm\": 0.27450980392156865,\n \"acc_norm_stderr\": 0.04440521906179326\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.53,\n \"acc_stderr\": 0.05016135580465919,\n \"acc_norm\": 0.53,\n \"acc_norm_stderr\": 0.05016135580465919\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.3574468085106383,\n \"acc_stderr\": 0.03132941789476425,\n \"acc_norm\": 0.3574468085106383,\n \"acc_norm_stderr\": 0.03132941789476425\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.2631578947368421,\n \"acc_stderr\": 0.041424397194893624,\n \"acc_norm\": 0.2631578947368421,\n \"acc_norm_stderr\": 0.041424397194893624\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.4068965517241379,\n \"acc_stderr\": 0.040937939812662374,\n \"acc_norm\": 0.4068965517241379,\n \"acc_norm_stderr\": 0.040937939812662374\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.2698412698412698,\n \"acc_stderr\": 0.02286083830923207,\n \"acc_norm\": 0.2698412698412698,\n \"acc_norm_stderr\": 0.02286083830923207\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.30158730158730157,\n \"acc_stderr\": 0.04104947269903394,\n \"acc_norm\": 0.30158730158730157,\n \"acc_norm_stderr\": 0.04104947269903394\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.25,\n \"acc_stderr\": 0.04351941398892446,\n \"acc_norm\": 0.25,\n \"acc_norm_stderr\": 0.04351941398892446\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.5032258064516129,\n \"acc_stderr\": 0.028443414226438323,\n \"acc_norm\": 0.5032258064516129,\n \"acc_norm_stderr\": 0.028443414226438323\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.3891625615763547,\n \"acc_stderr\": 0.03430462416103872,\n \"acc_norm\": 0.3891625615763547,\n \"acc_norm_stderr\": 0.03430462416103872\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.43,\n \"acc_stderr\": 0.04975698519562428,\n \"acc_norm\": 0.43,\n \"acc_norm_stderr\": 0.04975698519562428\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.5212121212121212,\n \"acc_stderr\": 0.03900828913737302,\n \"acc_norm\": 0.5212121212121212,\n \"acc_norm_stderr\": 0.03900828913737302\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.5252525252525253,\n \"acc_stderr\": 0.03557806245087314,\n \"acc_norm\": 0.5252525252525253,\n \"acc_norm_stderr\": 0.03557806245087314\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.5854922279792746,\n \"acc_stderr\": 0.03555300319557668,\n \"acc_norm\": 0.5854922279792746,\n \"acc_norm_stderr\": 0.03555300319557668\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.4230769230769231,\n 
\"acc_stderr\": 0.02504919787604235,\n \"acc_norm\": 0.4230769230769231,\n \"acc_norm_stderr\": 0.02504919787604235\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.25555555555555554,\n \"acc_stderr\": 0.026593939101844072,\n \"acc_norm\": 0.25555555555555554,\n \"acc_norm_stderr\": 0.026593939101844072\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.3949579831932773,\n \"acc_stderr\": 0.031753678460966245,\n \"acc_norm\": 0.3949579831932773,\n \"acc_norm_stderr\": 0.031753678460966245\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.3576158940397351,\n \"acc_stderr\": 0.03913453431177258,\n \"acc_norm\": 0.3576158940397351,\n \"acc_norm_stderr\": 0.03913453431177258\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.5908256880733945,\n \"acc_stderr\": 0.02108067026443373,\n \"acc_norm\": 0.5908256880733945,\n \"acc_norm_stderr\": 0.02108067026443373\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.3472222222222222,\n \"acc_stderr\": 0.032468872436376486,\n \"acc_norm\": 0.3472222222222222,\n \"acc_norm_stderr\": 0.032468872436376486\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.5196078431372549,\n \"acc_stderr\": 0.03506612560524866,\n \"acc_norm\": 0.5196078431372549,\n \"acc_norm_stderr\": 0.03506612560524866\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.5822784810126582,\n \"acc_stderr\": 0.032103530322412685,\n \"acc_norm\": 0.5822784810126582,\n \"acc_norm_stderr\": 0.032103530322412685\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.39461883408071746,\n \"acc_stderr\": 0.03280400504755291,\n \"acc_norm\": 0.39461883408071746,\n \"acc_norm_stderr\": 0.03280400504755291\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.5114503816793893,\n \"acc_stderr\": 0.043841400240780176,\n \"acc_norm\": 0.5114503816793893,\n \"acc_norm_stderr\": 0.043841400240780176\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.5867768595041323,\n \"acc_stderr\": 0.04495087843548408,\n \"acc_norm\": 0.5867768595041323,\n \"acc_norm_stderr\": 0.04495087843548408\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.46296296296296297,\n \"acc_stderr\": 0.04820403072760628,\n \"acc_norm\": 0.46296296296296297,\n \"acc_norm_stderr\": 0.04820403072760628\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.4539877300613497,\n \"acc_stderr\": 0.0391170190467718,\n \"acc_norm\": 0.4539877300613497,\n \"acc_norm_stderr\": 0.0391170190467718\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.2857142857142857,\n \"acc_stderr\": 0.042878587513404565,\n \"acc_norm\": 0.2857142857142857,\n \"acc_norm_stderr\": 0.042878587513404565\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.5728155339805825,\n \"acc_stderr\": 0.04897957737781168,\n \"acc_norm\": 0.5728155339805825,\n \"acc_norm_stderr\": 0.04897957737781168\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.6623931623931624,\n \"acc_stderr\": 0.030980296992618558,\n \"acc_norm\": 0.6623931623931624,\n \"acc_norm_stderr\": 0.030980296992618558\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.47,\n \"acc_stderr\": 0.05016135580465919,\n \"acc_norm\": 0.47,\n \"acc_norm_stderr\": 0.05016135580465919\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.4955300127713921,\n \"acc_stderr\": 0.017879248970584384,\n \"acc_norm\": 
0.4955300127713921,\n \"acc_norm_stderr\": 0.017879248970584384\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.45664739884393063,\n \"acc_stderr\": 0.026817718130348923,\n \"acc_norm\": 0.45664739884393063,\n \"acc_norm_stderr\": 0.026817718130348923\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.23128491620111732,\n \"acc_stderr\": 0.014102223623152579,\n \"acc_norm\": 0.23128491620111732,\n \"acc_norm_stderr\": 0.014102223623152579\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.5228758169934641,\n \"acc_stderr\": 0.028599936776089768,\n \"acc_norm\": 0.5228758169934641,\n \"acc_norm_stderr\": 0.028599936776089768\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.4662379421221865,\n \"acc_stderr\": 0.02833327710956278,\n \"acc_norm\": 0.4662379421221865,\n \"acc_norm_stderr\": 0.02833327710956278\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.4444444444444444,\n \"acc_stderr\": 0.027648477877413327,\n \"acc_norm\": 0.4444444444444444,\n \"acc_norm_stderr\": 0.027648477877413327\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.3120567375886525,\n \"acc_stderr\": 0.027640120545169927,\n \"acc_norm\": 0.3120567375886525,\n \"acc_norm_stderr\": 0.027640120545169927\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.3272490221642764,\n \"acc_stderr\": 0.011983819806464737,\n \"acc_norm\": 0.3272490221642764,\n \"acc_norm_stderr\": 0.011983819806464737\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.33088235294117646,\n \"acc_stderr\": 0.02858270975389844,\n \"acc_norm\": 0.33088235294117646,\n \"acc_norm_stderr\": 0.02858270975389844\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.34967320261437906,\n \"acc_stderr\": 0.019291961895066368,\n \"acc_norm\": 0.34967320261437906,\n \"acc_norm_stderr\": 0.019291961895066368\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.4909090909090909,\n \"acc_stderr\": 0.04788339768702862,\n \"acc_norm\": 0.4909090909090909,\n \"acc_norm_stderr\": 0.04788339768702862\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.5142857142857142,\n \"acc_stderr\": 0.03199615232806286,\n \"acc_norm\": 0.5142857142857142,\n \"acc_norm_stderr\": 0.03199615232806286\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.5771144278606966,\n \"acc_stderr\": 0.034932317774212816,\n \"acc_norm\": 0.5771144278606966,\n \"acc_norm_stderr\": 0.034932317774212816\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.62,\n \"acc_stderr\": 0.04878317312145632,\n \"acc_norm\": 0.62,\n \"acc_norm_stderr\": 0.04878317312145632\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.4036144578313253,\n \"acc_stderr\": 0.038194861407583984,\n \"acc_norm\": 0.4036144578313253,\n \"acc_norm_stderr\": 0.038194861407583984\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.5029239766081871,\n \"acc_stderr\": 0.03834759370936839,\n \"acc_norm\": 0.5029239766081871,\n \"acc_norm_stderr\": 0.03834759370936839\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.28886168910648713,\n \"mc1_stderr\": 0.015866346401384308,\n \"mc2\": 0.4706617297468041,\n \"mc2_stderr\": 0.015095071415919299\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.6440410418310971,\n \"acc_stderr\": 0.013456740656273964\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.20166793025018953,\n \"acc_stderr\": 0.011052295889544378\n }\n}\n```", "repo_url": 
"https://huggingface.co/teilomillet/MiniMerlin-3B", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_12_16T15_00_53.060575", "path": ["**/details_harness|arc:challenge|25_2023-12-16T15-00-53.060575.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-12-16T15-00-53.060575.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_12_16T15_00_53.060575", "path": ["**/details_harness|gsm8k|5_2023-12-16T15-00-53.060575.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-12-16T15-00-53.060575.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_12_16T15_00_53.060575", "path": ["**/details_harness|hellaswag|10_2023-12-16T15-00-53.060575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-12-16T15-00-53.060575.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_12_16T15_00_53.060575", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T15-00-53.060575.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-12-16T15-00-53.060575.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-12-16T15-00-53.060575.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T15-00-53.060575.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T15-00-53.060575.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-12-16T15-00-53.060575.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T15-00-53.060575.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T15-00-53.060575.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T15-00-53.060575.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T15-00-53.060575.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-12-16T15-00-53.060575.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-12-16T15-00-53.060575.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T15-00-53.060575.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-12-16T15-00-53.060575.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T15-00-53.060575.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T15-00-53.060575.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T15-00-53.060575.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-12-16T15-00-53.060575.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T15-00-53.060575.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T15-00-53.060575.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T15-00-53.060575.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T15-00-53.060575.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T15-00-53.060575.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T15-00-53.060575.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T15-00-53.060575.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T15-00-53.060575.parquet", 
"**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T15-00-53.060575.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T15-00-53.060575.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T15-00-53.060575.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T15-00-53.060575.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T15-00-53.060575.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T15-00-53.060575.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-12-16T15-00-53.060575.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T15-00-53.060575.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-12-16T15-00-53.060575.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T15-00-53.060575.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T15-00-53.060575.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T15-00-53.060575.parquet", "**/details_harness|hendrycksTest-management|5_2023-12-16T15-00-53.060575.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-12-16T15-00-53.060575.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T15-00-53.060575.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T15-00-53.060575.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T15-00-53.060575.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T15-00-53.060575.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-12-16T15-00-53.060575.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-12-16T15-00-53.060575.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-12-16T15-00-53.060575.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T15-00-53.060575.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-12-16T15-00-53.060575.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T15-00-53.060575.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T15-00-53.060575.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-12-16T15-00-53.060575.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-12-16T15-00-53.060575.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-12-16T15-00-53.060575.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T15-00-53.060575.parquet", "**/details_harness|hendrycksTest-virology|5_2023-12-16T15-00-53.060575.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-12-16T15-00-53.060575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T15-00-53.060575.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-12-16T15-00-53.060575.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-12-16T15-00-53.060575.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T15-00-53.060575.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T15-00-53.060575.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-12-16T15-00-53.060575.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T15-00-53.060575.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T15-00-53.060575.parquet", 
"**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T15-00-53.060575.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T15-00-53.060575.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-12-16T15-00-53.060575.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-12-16T15-00-53.060575.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T15-00-53.060575.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-12-16T15-00-53.060575.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T15-00-53.060575.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T15-00-53.060575.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T15-00-53.060575.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-12-16T15-00-53.060575.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T15-00-53.060575.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T15-00-53.060575.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T15-00-53.060575.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T15-00-53.060575.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T15-00-53.060575.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T15-00-53.060575.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T15-00-53.060575.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T15-00-53.060575.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T15-00-53.060575.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T15-00-53.060575.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T15-00-53.060575.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T15-00-53.060575.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T15-00-53.060575.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T15-00-53.060575.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-12-16T15-00-53.060575.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T15-00-53.060575.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-12-16T15-00-53.060575.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T15-00-53.060575.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T15-00-53.060575.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T15-00-53.060575.parquet", "**/details_harness|hendrycksTest-management|5_2023-12-16T15-00-53.060575.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-12-16T15-00-53.060575.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T15-00-53.060575.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T15-00-53.060575.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T15-00-53.060575.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T15-00-53.060575.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-12-16T15-00-53.060575.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-12-16T15-00-53.060575.parquet", 
"**/details_harness|hendrycksTest-prehistory|5_2023-12-16T15-00-53.060575.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T15-00-53.060575.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-12-16T15-00-53.060575.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T15-00-53.060575.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T15-00-53.060575.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-12-16T15-00-53.060575.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-12-16T15-00-53.060575.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-12-16T15-00-53.060575.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T15-00-53.060575.parquet", "**/details_harness|hendrycksTest-virology|5_2023-12-16T15-00-53.060575.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-12-16T15-00-53.060575.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_12_16T15_00_53.060575", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T15-00-53.060575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T15-00-53.060575.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_12_16T15_00_53.060575", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-12-16T15-00-53.060575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-12-16T15-00-53.060575.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_12_16T15_00_53.060575", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-12-16T15-00-53.060575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-12-16T15-00-53.060575.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_12_16T15_00_53.060575", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T15-00-53.060575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T15-00-53.060575.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_12_16T15_00_53.060575", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T15-00-53.060575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T15-00-53.060575.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_12_16T15_00_53.060575", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-12-16T15-00-53.060575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-12-16T15-00-53.060575.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_12_16T15_00_53.060575", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T15-00-53.060575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T15-00-53.060575.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_12_16T15_00_53.060575", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T15-00-53.060575.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T15-00-53.060575.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_12_16T15_00_53.060575", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T15-00-53.060575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T15-00-53.060575.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_12_16T15_00_53.060575", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T15-00-53.060575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T15-00-53.060575.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_12_16T15_00_53.060575", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-12-16T15-00-53.060575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-12-16T15-00-53.060575.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_12_16T15_00_53.060575", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-12-16T15-00-53.060575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-12-16T15-00-53.060575.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_12_16T15_00_53.060575", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T15-00-53.060575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T15-00-53.060575.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_12_16T15_00_53.060575", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-12-16T15-00-53.060575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-12-16T15-00-53.060575.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_12_16T15_00_53.060575", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T15-00-53.060575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T15-00-53.060575.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_12_16T15_00_53.060575", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T15-00-53.060575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T15-00-53.060575.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_12_16T15_00_53.060575", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T15-00-53.060575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T15-00-53.060575.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_12_16T15_00_53.060575", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-12-16T15-00-53.060575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-12-16T15-00-53.060575.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_12_16T15_00_53.060575", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T15-00-53.060575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T15-00-53.060575.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_12_16T15_00_53.060575", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T15-00-53.060575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T15-00-53.060575.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_12_16T15_00_53.060575", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T15-00-53.060575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T15-00-53.060575.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_12_16T15_00_53.060575", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T15-00-53.060575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T15-00-53.060575.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_12_16T15_00_53.060575", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T15-00-53.060575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T15-00-53.060575.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_12_16T15_00_53.060575", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T15-00-53.060575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T15-00-53.060575.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_12_16T15_00_53.060575", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T15-00-53.060575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T15-00-53.060575.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_12_16T15_00_53.060575", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T15-00-53.060575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T15-00-53.060575.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_12_16T15_00_53.060575", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T15-00-53.060575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T15-00-53.060575.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_12_16T15_00_53.060575", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T15-00-53.060575.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T15-00-53.060575.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_12_16T15_00_53.060575", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T15-00-53.060575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T15-00-53.060575.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_12_16T15_00_53.060575", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T15-00-53.060575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T15-00-53.060575.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_12_16T15_00_53.060575", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T15-00-53.060575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T15-00-53.060575.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_12_16T15_00_53.060575", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T15-00-53.060575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T15-00-53.060575.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_12_16T15_00_53.060575", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-12-16T15-00-53.060575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-12-16T15-00-53.060575.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_12_16T15_00_53.060575", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T15-00-53.060575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T15-00-53.060575.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_12_16T15_00_53.060575", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-12-16T15-00-53.060575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-12-16T15-00-53.060575.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_12_16T15_00_53.060575", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T15-00-53.060575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T15-00-53.060575.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_12_16T15_00_53.060575", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T15-00-53.060575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T15-00-53.060575.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_12_16T15_00_53.060575", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T15-00-53.060575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T15-00-53.060575.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_12_16T15_00_53.060575", "path": ["**/details_harness|hendrycksTest-management|5_2023-12-16T15-00-53.060575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-12-16T15-00-53.060575.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_12_16T15_00_53.060575", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-12-16T15-00-53.060575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-12-16T15-00-53.060575.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_12_16T15_00_53.060575", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T15-00-53.060575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T15-00-53.060575.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_12_16T15_00_53.060575", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T15-00-53.060575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T15-00-53.060575.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_12_16T15_00_53.060575", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T15-00-53.060575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T15-00-53.060575.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_12_16T15_00_53.060575", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T15-00-53.060575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T15-00-53.060575.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_12_16T15_00_53.060575", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-12-16T15-00-53.060575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-12-16T15-00-53.060575.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_12_16T15_00_53.060575", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-12-16T15-00-53.060575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-12-16T15-00-53.060575.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_12_16T15_00_53.060575", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-12-16T15-00-53.060575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-12-16T15-00-53.060575.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_12_16T15_00_53.060575", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T15-00-53.060575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T15-00-53.060575.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_12_16T15_00_53.060575", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-12-16T15-00-53.060575.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2023-12-16T15-00-53.060575.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_12_16T15_00_53.060575", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T15-00-53.060575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T15-00-53.060575.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_12_16T15_00_53.060575", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T15-00-53.060575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T15-00-53.060575.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_12_16T15_00_53.060575", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-12-16T15-00-53.060575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-12-16T15-00-53.060575.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_12_16T15_00_53.060575", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-12-16T15-00-53.060575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-12-16T15-00-53.060575.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_12_16T15_00_53.060575", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-12-16T15-00-53.060575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-12-16T15-00-53.060575.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_12_16T15_00_53.060575", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T15-00-53.060575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T15-00-53.060575.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_12_16T15_00_53.060575", "path": ["**/details_harness|hendrycksTest-virology|5_2023-12-16T15-00-53.060575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-12-16T15-00-53.060575.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_12_16T15_00_53.060575", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-12-16T15-00-53.060575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-12-16T15-00-53.060575.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_12_16T15_00_53.060575", "path": ["**/details_harness|truthfulqa:mc|0_2023-12-16T15-00-53.060575.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-12-16T15-00-53.060575.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_12_16T15_00_53.060575", "path": ["**/details_harness|winogrande|5_2023-12-16T15-00-53.060575.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-12-16T15-00-53.060575.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_12_16T15_00_53.060575", "path": ["results_2023-12-16T15-00-53.060575.parquet"]}, {"split": "latest", "path": 
["results_2023-12-16T15-00-53.060575.parquet"]}]}]}
2023-12-16T15:04:30+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of teilomillet/MiniMerlin-3B Dataset automatically created during the evaluation run of model teilomillet/MiniMerlin-3B on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-12-16T15:00:53.060575 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
[ "# Dataset Card for Evaluation run of teilomillet/MiniMerlin-3B\n\n\n\nDataset automatically created during the evaluation run of model teilomillet/MiniMerlin-3B on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-12-16T15:00:53.060575(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of teilomillet/MiniMerlin-3B\n\n\n\nDataset automatically created during the evaluation run of model teilomillet/MiniMerlin-3B on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-12-16T15:00:53.060575(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 179, 67, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of teilomillet/MiniMerlin-3B\n\n\n\nDataset automatically created during the evaluation run of model teilomillet/MiniMerlin-3B on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-12-16T15:00:53.060575(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]## Dataset Card Authors [optional]## Dataset Card Contact" ]
953dd75fd9cb6c7a953cca553255c3c15d4864f2
# Dataset Card for Evaluation run of luffycodes/vicuna-class-shishya-ac-hal-7b-ep3 <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [luffycodes/vicuna-class-shishya-ac-hal-7b-ep3](https://huggingface.co/luffycodes/vicuna-class-shishya-ac-hal-7b-ep3) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_luffycodes__vicuna-class-shishya-ac-hal-7b-ep3", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-12-16T15:05:04.315196](https://huggingface.co/datasets/open-llm-leaderboard/details_luffycodes__vicuna-class-shishya-ac-hal-7b-ep3/blob/main/results_2023-12-16T15-05-04.315196.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.504457394086061, "acc_stderr": 0.0339872992064852, "acc_norm": 0.5129744502613426, "acc_norm_stderr": 0.03491403416694853, "mc1": 0.2766217870257038, "mc1_stderr": 0.015659605755326923, "mc2": 0.43032912918561517, "mc2_stderr": 0.014997775568928156 }, "harness|arc:challenge|25": { "acc": 0.4249146757679181, "acc_stderr": 0.014445698968520769, "acc_norm": 0.4462457337883959, "acc_norm_stderr": 0.014526705548539982 }, "harness|hellaswag|10": { "acc": 0.5800637323242382, "acc_stderr": 0.004925394995490124, "acc_norm": 0.7697669786895041, "acc_norm_stderr": 0.004201215520808244 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.27, "acc_stderr": 0.044619604333847415, "acc_norm": 0.27, "acc_norm_stderr": 0.044619604333847415 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.5185185185185185, "acc_stderr": 0.043163785995113245, "acc_norm": 0.5185185185185185, "acc_norm_stderr": 0.043163785995113245 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.4868421052631579, "acc_stderr": 0.04067533136309173, "acc_norm": 0.4868421052631579, "acc_norm_stderr": 0.04067533136309173 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.51, "acc_stderr": 0.05024183937956912, "acc_norm": 0.51, "acc_norm_stderr": 0.05024183937956912 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.569811320754717, "acc_stderr": 0.030471445867183238, "acc_norm": 0.569811320754717, "acc_norm_stderr": 0.030471445867183238 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.5, "acc_stderr": 0.04181210050035455, "acc_norm": 0.5, "acc_norm_stderr": 0.04181210050035455 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.31, "acc_stderr": 0.04648231987117317, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117317 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.47, "acc_stderr": 0.050161355804659205, "acc_norm": 0.47, "acc_norm_stderr":
0.050161355804659205 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.45664739884393063, "acc_stderr": 0.03798106566014498, "acc_norm": 0.45664739884393063, "acc_norm_stderr": 0.03798106566014498 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.18627450980392157, "acc_stderr": 0.03873958714149352, "acc_norm": 0.18627450980392157, "acc_norm_stderr": 0.03873958714149352 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.65, "acc_stderr": 0.047937248544110196, "acc_norm": 0.65, "acc_norm_stderr": 0.047937248544110196 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.425531914893617, "acc_stderr": 0.03232146916224468, "acc_norm": 0.425531914893617, "acc_norm_stderr": 0.03232146916224468 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.34210526315789475, "acc_stderr": 0.04462917535336936, "acc_norm": 0.34210526315789475, "acc_norm_stderr": 0.04462917535336936 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.4896551724137931, "acc_stderr": 0.04165774775728763, "acc_norm": 0.4896551724137931, "acc_norm_stderr": 0.04165774775728763 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.335978835978836, "acc_stderr": 0.02432631052914915, "acc_norm": 0.335978835978836, "acc_norm_stderr": 0.02432631052914915 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.30952380952380953, "acc_stderr": 0.04134913018303316, "acc_norm": 0.30952380952380953, "acc_norm_stderr": 0.04134913018303316 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.33, "acc_stderr": 0.047258156262526045, "acc_norm": 0.33, "acc_norm_stderr": 0.047258156262526045 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.5612903225806452, "acc_stderr": 0.02822949732031722, "acc_norm": 0.5612903225806452, "acc_norm_stderr": 0.02822949732031722 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.41379310344827586, "acc_stderr": 0.03465304488406795, "acc_norm": 0.41379310344827586, "acc_norm_stderr": 0.03465304488406795 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.44, "acc_stderr": 0.04988876515698589, "acc_norm": 0.44, "acc_norm_stderr": 0.04988876515698589 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.6424242424242425, "acc_stderr": 0.037425970438065864, "acc_norm": 0.6424242424242425, "acc_norm_stderr": 0.037425970438065864 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.6111111111111112, "acc_stderr": 0.0347327959083696, "acc_norm": 0.6111111111111112, "acc_norm_stderr": 0.0347327959083696 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.7564766839378239, "acc_stderr": 0.030975436386845457, "acc_norm": 0.7564766839378239, "acc_norm_stderr": 0.030975436386845457 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.4846153846153846, "acc_stderr": 0.025339003010106515, "acc_norm": 0.4846153846153846, "acc_norm_stderr": 0.025339003010106515 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.27037037037037037, "acc_stderr": 0.02708037281514566, "acc_norm": 0.27037037037037037, "acc_norm_stderr": 0.02708037281514566 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.4957983193277311, "acc_stderr": 0.0324773433444811, "acc_norm": 0.4957983193277311, "acc_norm_stderr": 0.0324773433444811 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.304635761589404, 
"acc_stderr": 0.037579499229433426, "acc_norm": 0.304635761589404, "acc_norm_stderr": 0.037579499229433426 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.7064220183486238, "acc_stderr": 0.019525151122639667, "acc_norm": 0.7064220183486238, "acc_norm_stderr": 0.019525151122639667 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.375, "acc_stderr": 0.033016908987210894, "acc_norm": 0.375, "acc_norm_stderr": 0.033016908987210894 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.7107843137254902, "acc_stderr": 0.031822318676475544, "acc_norm": 0.7107843137254902, "acc_norm_stderr": 0.031822318676475544 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.6919831223628692, "acc_stderr": 0.0300523893356057, "acc_norm": 0.6919831223628692, "acc_norm_stderr": 0.0300523893356057 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.6053811659192825, "acc_stderr": 0.03280400504755291, "acc_norm": 0.6053811659192825, "acc_norm_stderr": 0.03280400504755291 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.6259541984732825, "acc_stderr": 0.04243869242230524, "acc_norm": 0.6259541984732825, "acc_norm_stderr": 0.04243869242230524 }, "harness|hendrycksTest-international_law|5": { "acc": 0.6033057851239669, "acc_stderr": 0.044658697805310094, "acc_norm": 0.6033057851239669, "acc_norm_stderr": 0.044658697805310094 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.5277777777777778, "acc_stderr": 0.048262172941398944, "acc_norm": 0.5277777777777778, "acc_norm_stderr": 0.048262172941398944 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.5398773006134969, "acc_stderr": 0.03915857291436971, "acc_norm": 0.5398773006134969, "acc_norm_stderr": 0.03915857291436971 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.39285714285714285, "acc_stderr": 0.04635550135609976, "acc_norm": 0.39285714285714285, "acc_norm_stderr": 0.04635550135609976 }, "harness|hendrycksTest-management|5": { "acc": 0.6699029126213593, "acc_stderr": 0.04656147110012349, "acc_norm": 0.6699029126213593, "acc_norm_stderr": 0.04656147110012349 }, "harness|hendrycksTest-marketing|5": { "acc": 0.7649572649572649, "acc_stderr": 0.027778835904935434, "acc_norm": 0.7649572649572649, "acc_norm_stderr": 0.027778835904935434 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.53, "acc_stderr": 0.05016135580465919, "acc_norm": 0.53, "acc_norm_stderr": 0.05016135580465919 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.6845466155810983, "acc_stderr": 0.016617501738763397, "acc_norm": 0.6845466155810983, "acc_norm_stderr": 0.016617501738763397 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.5664739884393064, "acc_stderr": 0.026680134761679214, "acc_norm": 0.5664739884393064, "acc_norm_stderr": 0.026680134761679214 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.2737430167597765, "acc_stderr": 0.014912413096372434, "acc_norm": 0.2737430167597765, "acc_norm_stderr": 0.014912413096372434 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.5620915032679739, "acc_stderr": 0.02840830202033269, "acc_norm": 0.5620915032679739, "acc_norm_stderr": 0.02840830202033269 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.617363344051447, "acc_stderr": 0.027604689028581993, "acc_norm": 0.617363344051447, "acc_norm_stderr": 0.027604689028581993 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.5895061728395061, "acc_stderr": 0.027371350925124768, "acc_norm": 0.5895061728395061, "acc_norm_stderr": 0.027371350925124768 }, 
"harness|hendrycksTest-professional_accounting|5": { "acc": 0.35815602836879434, "acc_stderr": 0.028602085862759412, "acc_norm": 0.35815602836879434, "acc_norm_stderr": 0.028602085862759412 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.3748370273794003, "acc_stderr": 0.012363652467551927, "acc_norm": 0.3748370273794003, "acc_norm_stderr": 0.012363652467551927 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.5220588235294118, "acc_stderr": 0.03034326422421352, "acc_norm": 0.5220588235294118, "acc_norm_stderr": 0.03034326422421352 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.48856209150326796, "acc_stderr": 0.020222541515610863, "acc_norm": 0.48856209150326796, "acc_norm_stderr": 0.020222541515610863 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6181818181818182, "acc_stderr": 0.04653429807913507, "acc_norm": 0.6181818181818182, "acc_norm_stderr": 0.04653429807913507 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.6163265306122448, "acc_stderr": 0.031130880396235926, "acc_norm": 0.6163265306122448, "acc_norm_stderr": 0.031130880396235926 }, "harness|hendrycksTest-sociology|5": { "acc": 0.7213930348258707, "acc_stderr": 0.031700561834973086, "acc_norm": 0.7213930348258707, "acc_norm_stderr": 0.031700561834973086 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.72, "acc_stderr": 0.04512608598542127, "acc_norm": 0.72, "acc_norm_stderr": 0.04512608598542127 }, "harness|hendrycksTest-virology|5": { "acc": 0.40963855421686746, "acc_stderr": 0.03828401115079022, "acc_norm": 0.40963855421686746, "acc_norm_stderr": 0.03828401115079022 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.7309941520467836, "acc_stderr": 0.034010526201040885, "acc_norm": 0.7309941520467836, "acc_norm_stderr": 0.034010526201040885 }, "harness|truthfulqa:mc|0": { "mc1": 0.2766217870257038, "mc1_stderr": 0.015659605755326923, "mc2": 0.43032912918561517, "mc2_stderr": 0.014997775568928156 }, "harness|winogrande|5": { "acc": 0.7174427782162589, "acc_stderr": 0.012654062850971396 }, "harness|gsm8k|5": { "acc": 0.0, "acc_stderr": 0.0 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). 
--> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
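As a small addition to the auto-generated card above, here is a minimal loading sketch. It is not part of the original card; it only reuses names that appear in this record (the repository id, the "results" and "harness_winogrande_5" configurations, and the "latest" split listed in the metadata), and any other per-task configuration name from the metadata (for example "harness_arc_challenge_25" or "harness_hendrycksTest_college_biology_5") can be substituted.

```python
from datasets import load_dataset

# Details repository for this evaluation run (taken from the card above).
REPO = "open-llm-leaderboard/details_luffycodes__vicuna-class-shishya-ac-hal-7b-ep3"

# Aggregated metrics of the run; per the config metadata, the "latest" split
# points to the most recent timestamped split ("2023_12_16T15_05_04.315196").
results = load_dataset(REPO, "results", split="latest")
print(results[0])

# Per-sample details for one task configuration, e.g. Winogrande (5-shot).
winogrande = load_dataset(REPO, "harness_winogrande_5", split="latest")
print(len(winogrande), "evaluated examples")
```

The same pattern should apply to the other evaluation-run repositories collected in this dump; only the repository name and, where relevant, the timestamped split name change.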
open-llm-leaderboard/details_luffycodes__vicuna-class-shishya-ac-hal-7b-ep3
[ "region:us" ]
2023-12-16T15:07:59+00:00
{"pretty_name": "Evaluation run of luffycodes/vicuna-class-shishya-ac-hal-7b-ep3", "dataset_summary": "Dataset automatically created during the evaluation run of model [luffycodes/vicuna-class-shishya-ac-hal-7b-ep3](https://huggingface.co/luffycodes/vicuna-class-shishya-ac-hal-7b-ep3) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_luffycodes__vicuna-class-shishya-ac-hal-7b-ep3\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-12-16T15:05:04.315196](https://huggingface.co/datasets/open-llm-leaderboard/details_luffycodes__vicuna-class-shishya-ac-hal-7b-ep3/blob/main/results_2023-12-16T15-05-04.315196.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.504457394086061,\n \"acc_stderr\": 0.0339872992064852,\n \"acc_norm\": 0.5129744502613426,\n \"acc_norm_stderr\": 0.03491403416694853,\n \"mc1\": 0.2766217870257038,\n \"mc1_stderr\": 0.015659605755326923,\n \"mc2\": 0.43032912918561517,\n \"mc2_stderr\": 0.014997775568928156\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.4249146757679181,\n \"acc_stderr\": 0.014445698968520769,\n \"acc_norm\": 0.4462457337883959,\n \"acc_norm_stderr\": 0.014526705548539982\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.5800637323242382,\n \"acc_stderr\": 0.004925394995490124,\n \"acc_norm\": 0.7697669786895041,\n \"acc_norm_stderr\": 0.004201215520808244\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.27,\n \"acc_stderr\": 0.044619604333847415,\n \"acc_norm\": 0.27,\n \"acc_norm_stderr\": 0.044619604333847415\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.5185185185185185,\n \"acc_stderr\": 0.043163785995113245,\n \"acc_norm\": 0.5185185185185185,\n \"acc_norm_stderr\": 0.043163785995113245\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.4868421052631579,\n \"acc_stderr\": 0.04067533136309173,\n \"acc_norm\": 0.4868421052631579,\n \"acc_norm_stderr\": 0.04067533136309173\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.51,\n \"acc_stderr\": 0.05024183937956912,\n \"acc_norm\": 0.51,\n \"acc_norm_stderr\": 0.05024183937956912\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.569811320754717,\n \"acc_stderr\": 0.030471445867183238,\n \"acc_norm\": 0.569811320754717,\n \"acc_norm_stderr\": 0.030471445867183238\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.5,\n \"acc_stderr\": 0.04181210050035455,\n \"acc_norm\": 0.5,\n \"acc_norm_stderr\": 0.04181210050035455\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 
0.31,\n \"acc_stderr\": 0.04648231987117317,\n \"acc_norm\": 0.31,\n \"acc_norm_stderr\": 0.04648231987117317\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.47,\n \"acc_stderr\": 0.050161355804659205,\n \"acc_norm\": 0.47,\n \"acc_norm_stderr\": 0.050161355804659205\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.31,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.31,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.45664739884393063,\n \"acc_stderr\": 0.03798106566014498,\n \"acc_norm\": 0.45664739884393063,\n \"acc_norm_stderr\": 0.03798106566014498\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.18627450980392157,\n \"acc_stderr\": 0.03873958714149352,\n \"acc_norm\": 0.18627450980392157,\n \"acc_norm_stderr\": 0.03873958714149352\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.65,\n \"acc_stderr\": 0.047937248544110196,\n \"acc_norm\": 0.65,\n \"acc_norm_stderr\": 0.047937248544110196\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.425531914893617,\n \"acc_stderr\": 0.03232146916224468,\n \"acc_norm\": 0.425531914893617,\n \"acc_norm_stderr\": 0.03232146916224468\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.34210526315789475,\n \"acc_stderr\": 0.04462917535336936,\n \"acc_norm\": 0.34210526315789475,\n \"acc_norm_stderr\": 0.04462917535336936\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.4896551724137931,\n \"acc_stderr\": 0.04165774775728763,\n \"acc_norm\": 0.4896551724137931,\n \"acc_norm_stderr\": 0.04165774775728763\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.335978835978836,\n \"acc_stderr\": 0.02432631052914915,\n \"acc_norm\": 0.335978835978836,\n \"acc_norm_stderr\": 0.02432631052914915\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.30952380952380953,\n \"acc_stderr\": 0.04134913018303316,\n \"acc_norm\": 0.30952380952380953,\n \"acc_norm_stderr\": 0.04134913018303316\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.33,\n \"acc_stderr\": 0.047258156262526045,\n \"acc_norm\": 0.33,\n \"acc_norm_stderr\": 0.047258156262526045\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.5612903225806452,\n \"acc_stderr\": 0.02822949732031722,\n \"acc_norm\": 0.5612903225806452,\n \"acc_norm_stderr\": 0.02822949732031722\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.41379310344827586,\n \"acc_stderr\": 0.03465304488406795,\n \"acc_norm\": 0.41379310344827586,\n \"acc_norm_stderr\": 0.03465304488406795\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.44,\n \"acc_stderr\": 0.04988876515698589,\n \"acc_norm\": 0.44,\n \"acc_norm_stderr\": 0.04988876515698589\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.6424242424242425,\n \"acc_stderr\": 0.037425970438065864,\n \"acc_norm\": 0.6424242424242425,\n \"acc_norm_stderr\": 0.037425970438065864\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.6111111111111112,\n \"acc_stderr\": 0.0347327959083696,\n \"acc_norm\": 0.6111111111111112,\n \"acc_norm_stderr\": 0.0347327959083696\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.7564766839378239,\n \"acc_stderr\": 0.030975436386845457,\n \"acc_norm\": 0.7564766839378239,\n \"acc_norm_stderr\": 0.030975436386845457\n },\n 
\"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.4846153846153846,\n \"acc_stderr\": 0.025339003010106515,\n \"acc_norm\": 0.4846153846153846,\n \"acc_norm_stderr\": 0.025339003010106515\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.27037037037037037,\n \"acc_stderr\": 0.02708037281514566,\n \"acc_norm\": 0.27037037037037037,\n \"acc_norm_stderr\": 0.02708037281514566\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.4957983193277311,\n \"acc_stderr\": 0.0324773433444811,\n \"acc_norm\": 0.4957983193277311,\n \"acc_norm_stderr\": 0.0324773433444811\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.304635761589404,\n \"acc_stderr\": 0.037579499229433426,\n \"acc_norm\": 0.304635761589404,\n \"acc_norm_stderr\": 0.037579499229433426\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.7064220183486238,\n \"acc_stderr\": 0.019525151122639667,\n \"acc_norm\": 0.7064220183486238,\n \"acc_norm_stderr\": 0.019525151122639667\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.375,\n \"acc_stderr\": 0.033016908987210894,\n \"acc_norm\": 0.375,\n \"acc_norm_stderr\": 0.033016908987210894\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.7107843137254902,\n \"acc_stderr\": 0.031822318676475544,\n \"acc_norm\": 0.7107843137254902,\n \"acc_norm_stderr\": 0.031822318676475544\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.6919831223628692,\n \"acc_stderr\": 0.0300523893356057,\n \"acc_norm\": 0.6919831223628692,\n \"acc_norm_stderr\": 0.0300523893356057\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6053811659192825,\n \"acc_stderr\": 0.03280400504755291,\n \"acc_norm\": 0.6053811659192825,\n \"acc_norm_stderr\": 0.03280400504755291\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.6259541984732825,\n \"acc_stderr\": 0.04243869242230524,\n \"acc_norm\": 0.6259541984732825,\n \"acc_norm_stderr\": 0.04243869242230524\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.6033057851239669,\n \"acc_stderr\": 0.044658697805310094,\n \"acc_norm\": 0.6033057851239669,\n \"acc_norm_stderr\": 0.044658697805310094\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.5277777777777778,\n \"acc_stderr\": 0.048262172941398944,\n \"acc_norm\": 0.5277777777777778,\n \"acc_norm_stderr\": 0.048262172941398944\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.5398773006134969,\n \"acc_stderr\": 0.03915857291436971,\n \"acc_norm\": 0.5398773006134969,\n \"acc_norm_stderr\": 0.03915857291436971\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.39285714285714285,\n \"acc_stderr\": 0.04635550135609976,\n \"acc_norm\": 0.39285714285714285,\n \"acc_norm_stderr\": 0.04635550135609976\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.6699029126213593,\n \"acc_stderr\": 0.04656147110012349,\n \"acc_norm\": 0.6699029126213593,\n \"acc_norm_stderr\": 0.04656147110012349\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.7649572649572649,\n \"acc_stderr\": 0.027778835904935434,\n \"acc_norm\": 0.7649572649572649,\n \"acc_norm_stderr\": 0.027778835904935434\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.53,\n \"acc_stderr\": 0.05016135580465919,\n \"acc_norm\": 0.53,\n \"acc_norm_stderr\": 0.05016135580465919\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.6845466155810983,\n 
\"acc_stderr\": 0.016617501738763397,\n \"acc_norm\": 0.6845466155810983,\n \"acc_norm_stderr\": 0.016617501738763397\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.5664739884393064,\n \"acc_stderr\": 0.026680134761679214,\n \"acc_norm\": 0.5664739884393064,\n \"acc_norm_stderr\": 0.026680134761679214\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.2737430167597765,\n \"acc_stderr\": 0.014912413096372434,\n \"acc_norm\": 0.2737430167597765,\n \"acc_norm_stderr\": 0.014912413096372434\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.5620915032679739,\n \"acc_stderr\": 0.02840830202033269,\n \"acc_norm\": 0.5620915032679739,\n \"acc_norm_stderr\": 0.02840830202033269\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.617363344051447,\n \"acc_stderr\": 0.027604689028581993,\n \"acc_norm\": 0.617363344051447,\n \"acc_norm_stderr\": 0.027604689028581993\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.5895061728395061,\n \"acc_stderr\": 0.027371350925124768,\n \"acc_norm\": 0.5895061728395061,\n \"acc_norm_stderr\": 0.027371350925124768\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.35815602836879434,\n \"acc_stderr\": 0.028602085862759412,\n \"acc_norm\": 0.35815602836879434,\n \"acc_norm_stderr\": 0.028602085862759412\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.3748370273794003,\n \"acc_stderr\": 0.012363652467551927,\n \"acc_norm\": 0.3748370273794003,\n \"acc_norm_stderr\": 0.012363652467551927\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.5220588235294118,\n \"acc_stderr\": 0.03034326422421352,\n \"acc_norm\": 0.5220588235294118,\n \"acc_norm_stderr\": 0.03034326422421352\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.48856209150326796,\n \"acc_stderr\": 0.020222541515610863,\n \"acc_norm\": 0.48856209150326796,\n \"acc_norm_stderr\": 0.020222541515610863\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6181818181818182,\n \"acc_stderr\": 0.04653429807913507,\n \"acc_norm\": 0.6181818181818182,\n \"acc_norm_stderr\": 0.04653429807913507\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.6163265306122448,\n \"acc_stderr\": 0.031130880396235926,\n \"acc_norm\": 0.6163265306122448,\n \"acc_norm_stderr\": 0.031130880396235926\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.7213930348258707,\n \"acc_stderr\": 0.031700561834973086,\n \"acc_norm\": 0.7213930348258707,\n \"acc_norm_stderr\": 0.031700561834973086\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.72,\n \"acc_stderr\": 0.04512608598542127,\n \"acc_norm\": 0.72,\n \"acc_norm_stderr\": 0.04512608598542127\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.40963855421686746,\n \"acc_stderr\": 0.03828401115079022,\n \"acc_norm\": 0.40963855421686746,\n \"acc_norm_stderr\": 0.03828401115079022\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.7309941520467836,\n \"acc_stderr\": 0.034010526201040885,\n \"acc_norm\": 0.7309941520467836,\n \"acc_norm_stderr\": 0.034010526201040885\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.2766217870257038,\n \"mc1_stderr\": 0.015659605755326923,\n \"mc2\": 0.43032912918561517,\n \"mc2_stderr\": 0.014997775568928156\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7174427782162589,\n \"acc_stderr\": 0.012654062850971396\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.0,\n \"acc_stderr\": 0.0\n }\n}\n```", "repo_url": 
"https://huggingface.co/luffycodes/vicuna-class-shishya-ac-hal-7b-ep3", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_12_16T15_05_04.315196", "path": ["**/details_harness|arc:challenge|25_2023-12-16T15-05-04.315196.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-12-16T15-05-04.315196.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_12_16T15_05_04.315196", "path": ["**/details_harness|gsm8k|5_2023-12-16T15-05-04.315196.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-12-16T15-05-04.315196.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_12_16T15_05_04.315196", "path": ["**/details_harness|hellaswag|10_2023-12-16T15-05-04.315196.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-12-16T15-05-04.315196.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_12_16T15_05_04.315196", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T15-05-04.315196.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-12-16T15-05-04.315196.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-12-16T15-05-04.315196.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T15-05-04.315196.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T15-05-04.315196.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-12-16T15-05-04.315196.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T15-05-04.315196.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T15-05-04.315196.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T15-05-04.315196.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T15-05-04.315196.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-12-16T15-05-04.315196.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-12-16T15-05-04.315196.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T15-05-04.315196.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-12-16T15-05-04.315196.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T15-05-04.315196.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T15-05-04.315196.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T15-05-04.315196.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-12-16T15-05-04.315196.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T15-05-04.315196.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T15-05-04.315196.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T15-05-04.315196.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T15-05-04.315196.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T15-05-04.315196.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T15-05-04.315196.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T15-05-04.315196.parquet", 
"**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T15-05-04.315196.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T15-05-04.315196.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T15-05-04.315196.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T15-05-04.315196.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T15-05-04.315196.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T15-05-04.315196.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T15-05-04.315196.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-12-16T15-05-04.315196.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T15-05-04.315196.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-12-16T15-05-04.315196.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T15-05-04.315196.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T15-05-04.315196.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T15-05-04.315196.parquet", "**/details_harness|hendrycksTest-management|5_2023-12-16T15-05-04.315196.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-12-16T15-05-04.315196.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T15-05-04.315196.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T15-05-04.315196.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T15-05-04.315196.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T15-05-04.315196.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-12-16T15-05-04.315196.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-12-16T15-05-04.315196.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-12-16T15-05-04.315196.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T15-05-04.315196.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-12-16T15-05-04.315196.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T15-05-04.315196.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T15-05-04.315196.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-12-16T15-05-04.315196.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-12-16T15-05-04.315196.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-12-16T15-05-04.315196.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T15-05-04.315196.parquet", "**/details_harness|hendrycksTest-virology|5_2023-12-16T15-05-04.315196.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-12-16T15-05-04.315196.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T15-05-04.315196.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-12-16T15-05-04.315196.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-12-16T15-05-04.315196.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T15-05-04.315196.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T15-05-04.315196.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-12-16T15-05-04.315196.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T15-05-04.315196.parquet", 
"**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T15-05-04.315196.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T15-05-04.315196.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T15-05-04.315196.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-12-16T15-05-04.315196.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-12-16T15-05-04.315196.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T15-05-04.315196.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-12-16T15-05-04.315196.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T15-05-04.315196.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T15-05-04.315196.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T15-05-04.315196.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-12-16T15-05-04.315196.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T15-05-04.315196.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T15-05-04.315196.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T15-05-04.315196.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T15-05-04.315196.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T15-05-04.315196.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T15-05-04.315196.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T15-05-04.315196.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T15-05-04.315196.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T15-05-04.315196.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T15-05-04.315196.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T15-05-04.315196.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T15-05-04.315196.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T15-05-04.315196.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T15-05-04.315196.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-12-16T15-05-04.315196.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T15-05-04.315196.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-12-16T15-05-04.315196.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T15-05-04.315196.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T15-05-04.315196.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T15-05-04.315196.parquet", "**/details_harness|hendrycksTest-management|5_2023-12-16T15-05-04.315196.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-12-16T15-05-04.315196.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T15-05-04.315196.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T15-05-04.315196.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T15-05-04.315196.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T15-05-04.315196.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-12-16T15-05-04.315196.parquet", 
"**/details_harness|hendrycksTest-philosophy|5_2023-12-16T15-05-04.315196.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-12-16T15-05-04.315196.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T15-05-04.315196.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-12-16T15-05-04.315196.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T15-05-04.315196.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T15-05-04.315196.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-12-16T15-05-04.315196.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-12-16T15-05-04.315196.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-12-16T15-05-04.315196.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T15-05-04.315196.parquet", "**/details_harness|hendrycksTest-virology|5_2023-12-16T15-05-04.315196.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-12-16T15-05-04.315196.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_12_16T15_05_04.315196", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T15-05-04.315196.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T15-05-04.315196.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_12_16T15_05_04.315196", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-12-16T15-05-04.315196.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-12-16T15-05-04.315196.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_12_16T15_05_04.315196", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-12-16T15-05-04.315196.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-12-16T15-05-04.315196.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_12_16T15_05_04.315196", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T15-05-04.315196.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T15-05-04.315196.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_12_16T15_05_04.315196", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T15-05-04.315196.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T15-05-04.315196.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_12_16T15_05_04.315196", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-12-16T15-05-04.315196.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-12-16T15-05-04.315196.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_12_16T15_05_04.315196", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T15-05-04.315196.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T15-05-04.315196.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_12_16T15_05_04.315196", "path": 
["**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T15-05-04.315196.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T15-05-04.315196.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_12_16T15_05_04.315196", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T15-05-04.315196.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T15-05-04.315196.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_12_16T15_05_04.315196", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T15-05-04.315196.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T15-05-04.315196.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_12_16T15_05_04.315196", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-12-16T15-05-04.315196.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-12-16T15-05-04.315196.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_12_16T15_05_04.315196", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-12-16T15-05-04.315196.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-12-16T15-05-04.315196.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_12_16T15_05_04.315196", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T15-05-04.315196.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T15-05-04.315196.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_12_16T15_05_04.315196", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-12-16T15-05-04.315196.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-12-16T15-05-04.315196.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_12_16T15_05_04.315196", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T15-05-04.315196.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T15-05-04.315196.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_12_16T15_05_04.315196", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T15-05-04.315196.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T15-05-04.315196.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_12_16T15_05_04.315196", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T15-05-04.315196.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T15-05-04.315196.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_12_16T15_05_04.315196", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-12-16T15-05-04.315196.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-global_facts|5_2023-12-16T15-05-04.315196.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_12_16T15_05_04.315196", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T15-05-04.315196.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T15-05-04.315196.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_12_16T15_05_04.315196", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T15-05-04.315196.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T15-05-04.315196.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_12_16T15_05_04.315196", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T15-05-04.315196.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T15-05-04.315196.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_12_16T15_05_04.315196", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T15-05-04.315196.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T15-05-04.315196.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_12_16T15_05_04.315196", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T15-05-04.315196.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T15-05-04.315196.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_12_16T15_05_04.315196", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T15-05-04.315196.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T15-05-04.315196.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_12_16T15_05_04.315196", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T15-05-04.315196.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T15-05-04.315196.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_12_16T15_05_04.315196", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T15-05-04.315196.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T15-05-04.315196.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_12_16T15_05_04.315196", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T15-05-04.315196.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T15-05-04.315196.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_12_16T15_05_04.315196", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T15-05-04.315196.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T15-05-04.315196.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_12_16T15_05_04.315196", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T15-05-04.315196.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T15-05-04.315196.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_12_16T15_05_04.315196", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T15-05-04.315196.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T15-05-04.315196.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_12_16T15_05_04.315196", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T15-05-04.315196.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T15-05-04.315196.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_12_16T15_05_04.315196", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T15-05-04.315196.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T15-05-04.315196.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_12_16T15_05_04.315196", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-12-16T15-05-04.315196.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-12-16T15-05-04.315196.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_12_16T15_05_04.315196", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T15-05-04.315196.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T15-05-04.315196.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_12_16T15_05_04.315196", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-12-16T15-05-04.315196.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-12-16T15-05-04.315196.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_12_16T15_05_04.315196", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T15-05-04.315196.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T15-05-04.315196.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_12_16T15_05_04.315196", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T15-05-04.315196.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T15-05-04.315196.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_12_16T15_05_04.315196", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T15-05-04.315196.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T15-05-04.315196.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_12_16T15_05_04.315196", "path": ["**/details_harness|hendrycksTest-management|5_2023-12-16T15-05-04.315196.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-12-16T15-05-04.315196.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_12_16T15_05_04.315196", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-12-16T15-05-04.315196.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-12-16T15-05-04.315196.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_12_16T15_05_04.315196", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T15-05-04.315196.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T15-05-04.315196.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_12_16T15_05_04.315196", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T15-05-04.315196.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T15-05-04.315196.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_12_16T15_05_04.315196", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T15-05-04.315196.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T15-05-04.315196.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_12_16T15_05_04.315196", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T15-05-04.315196.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T15-05-04.315196.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_12_16T15_05_04.315196", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-12-16T15-05-04.315196.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-12-16T15-05-04.315196.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_12_16T15_05_04.315196", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-12-16T15-05-04.315196.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-12-16T15-05-04.315196.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_12_16T15_05_04.315196", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-12-16T15-05-04.315196.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-12-16T15-05-04.315196.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_12_16T15_05_04.315196", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T15-05-04.315196.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T15-05-04.315196.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_12_16T15_05_04.315196", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2023-12-16T15-05-04.315196.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-12-16T15-05-04.315196.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_12_16T15_05_04.315196", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T15-05-04.315196.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T15-05-04.315196.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_12_16T15_05_04.315196", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T15-05-04.315196.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T15-05-04.315196.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_12_16T15_05_04.315196", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-12-16T15-05-04.315196.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-12-16T15-05-04.315196.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_12_16T15_05_04.315196", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-12-16T15-05-04.315196.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-12-16T15-05-04.315196.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_12_16T15_05_04.315196", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-12-16T15-05-04.315196.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-12-16T15-05-04.315196.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_12_16T15_05_04.315196", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T15-05-04.315196.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T15-05-04.315196.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_12_16T15_05_04.315196", "path": ["**/details_harness|hendrycksTest-virology|5_2023-12-16T15-05-04.315196.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-12-16T15-05-04.315196.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_12_16T15_05_04.315196", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-12-16T15-05-04.315196.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-12-16T15-05-04.315196.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_12_16T15_05_04.315196", "path": ["**/details_harness|truthfulqa:mc|0_2023-12-16T15-05-04.315196.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-12-16T15-05-04.315196.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_12_16T15_05_04.315196", "path": ["**/details_harness|winogrande|5_2023-12-16T15-05-04.315196.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-12-16T15-05-04.315196.parquet"]}]}, {"config_name": "results", "data_files": [{"split": 
"2023_12_16T15_05_04.315196", "path": ["results_2023-12-16T15-05-04.315196.parquet"]}, {"split": "latest", "path": ["results_2023-12-16T15-05-04.315196.parquet"]}]}]}
2023-12-16T15:08:39+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of luffycodes/vicuna-class-shishya-ac-hal-7b-ep3 Dataset automatically created during the evaluation run of model luffycodes/vicuna-class-shishya-ac-hal-7b-ep3 on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-12-16T15:05:04.315196 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
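The card text above ends its "To load the details from a run" instruction without the accompanying snippet, so a minimal sketch of such a call is given here. The repository id is an assumption based on the usual open-llm-leaderboard/details_<org>__<model> naming convention; the config name and the "latest" split are the ones declared in this record's metadata.

```python
from datasets import load_dataset

# Assumed repository id, following the details_<org>__<model> naming convention.
repo_id = "open-llm-leaderboard/details_luffycodes__vicuna-class-shishya-ac-hal-7b-ep3"

# Each harness task is exposed as its own config; the "latest" split always
# points at the parquet files of the most recent run.
details = load_dataset(repo_id, "harness_hendrycksTest_world_religions_5", split="latest")
print(details[0])
```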
[ "# Dataset Card for Evaluation run of luffycodes/vicuna-class-shishya-ac-hal-7b-ep3\n\n\n\nDataset automatically created during the evaluation run of model luffycodes/vicuna-class-shishya-ac-hal-7b-ep3 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-12-16T15:05:04.315196(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of luffycodes/vicuna-class-shishya-ac-hal-7b-ep3\n\n\n\nDataset automatically created during the evaluation run of model luffycodes/vicuna-class-shishya-ac-hal-7b-ep3 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-12-16T15:05:04.315196(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 203, 67, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of luffycodes/vicuna-class-shishya-ac-hal-7b-ep3\n\n\n\nDataset automatically created during the evaluation run of model luffycodes/vicuna-class-shishya-ac-hal-7b-ep3 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-12-16T15:05:04.315196(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]" ]
edeabf348a3fd50b1a0ec4b3d18ae95df8bf487b
# Dataset Card for Evaluation run of rishiraj/cutie <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [rishiraj/cutie](https://huggingface.co/rishiraj/cutie) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_rishiraj__cutie", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-12-16T15:05:22.803589](https://huggingface.co/datasets/open-llm-leaderboard/details_rishiraj__cutie/blob/main/results_2023-12-16T15-05-22.803589.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.2422854544334347, "acc_stderr": 0.030350774274944006, "acc_norm": 0.24263810423714618, "acc_norm_stderr": 0.03115733731927302, "mc1": 0.23255813953488372, "mc1_stderr": 0.014789157531080508, "mc2": 0.48417001401061344, "mc2_stderr": 0.016564877497923215 }, "harness|arc:challenge|25": { "acc": 0.22098976109215018, "acc_stderr": 0.012124929206818258, "acc_norm": 0.2696245733788396, "acc_norm_stderr": 0.01296804068686916 }, "harness|hellaswag|10": { "acc": 0.2561242780322645, "acc_stderr": 0.004355992090030987, "acc_norm": 0.27016530571599284, "acc_norm_stderr": 0.00443137554991136 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.28, "acc_stderr": 0.04512608598542127, "acc_norm": 0.28, "acc_norm_stderr": 0.04512608598542127 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.2222222222222222, "acc_stderr": 0.03591444084196969, "acc_norm": 0.2222222222222222, "acc_norm_stderr": 0.03591444084196969 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.14473684210526316, "acc_stderr": 0.0286319518459304, "acc_norm": 0.14473684210526316, "acc_norm_stderr": 0.0286319518459304 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.27, "acc_stderr": 0.044619604333847394, "acc_norm": 0.27, "acc_norm_stderr": 0.044619604333847394 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.2490566037735849, "acc_stderr": 0.02661648298050171, "acc_norm": 0.2490566037735849, "acc_norm_stderr": 0.02661648298050171 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.2708333333333333, "acc_stderr": 0.03716177437566015, "acc_norm": 0.2708333333333333, "acc_norm_stderr": 0.03716177437566015 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.21, "acc_stderr": 0.040936018074033256, "acc_norm": 0.21, "acc_norm_stderr": 0.040936018074033256 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.21, "acc_stderr": 0.040936018074033256, "acc_norm": 0.21, "acc_norm_stderr": 0.040936018074033256 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.26, "acc_stderr": 0.044084400227680814, 
"acc_norm": 0.26, "acc_norm_stderr": 0.044084400227680814 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.21965317919075145, "acc_stderr": 0.031568093627031744, "acc_norm": 0.21965317919075145, "acc_norm_stderr": 0.031568093627031744 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.27450980392156865, "acc_stderr": 0.04440521906179325, "acc_norm": 0.27450980392156865, "acc_norm_stderr": 0.04440521906179325 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.32, "acc_stderr": 0.04688261722621504, "acc_norm": 0.32, "acc_norm_stderr": 0.04688261722621504 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.2297872340425532, "acc_stderr": 0.027501752944412428, "acc_norm": 0.2297872340425532, "acc_norm_stderr": 0.027501752944412428 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.2631578947368421, "acc_stderr": 0.04142439719489361, "acc_norm": 0.2631578947368421, "acc_norm_stderr": 0.04142439719489361 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.27586206896551724, "acc_stderr": 0.037245636197746325, "acc_norm": 0.27586206896551724, "acc_norm_stderr": 0.037245636197746325 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.2275132275132275, "acc_stderr": 0.021591269407823795, "acc_norm": 0.2275132275132275, "acc_norm_stderr": 0.021591269407823795 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.16666666666666666, "acc_stderr": 0.03333333333333338, "acc_norm": 0.16666666666666666, "acc_norm_stderr": 0.03333333333333338 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.17, "acc_stderr": 0.0377525168068637, "acc_norm": 0.17, "acc_norm_stderr": 0.0377525168068637 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.23548387096774193, "acc_stderr": 0.02413763242933772, "acc_norm": 0.23548387096774193, "acc_norm_stderr": 0.02413763242933772 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.19704433497536947, "acc_stderr": 0.027986724666736212, "acc_norm": 0.19704433497536947, "acc_norm_stderr": 0.027986724666736212 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.22, "acc_stderr": 0.041633319989322695, "acc_norm": 0.22, "acc_norm_stderr": 0.041633319989322695 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.2727272727272727, "acc_stderr": 0.0347769116216366, "acc_norm": 0.2727272727272727, "acc_norm_stderr": 0.0347769116216366 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.2222222222222222, "acc_stderr": 0.02962022787479049, "acc_norm": 0.2222222222222222, "acc_norm_stderr": 0.02962022787479049 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.20207253886010362, "acc_stderr": 0.02897908979429673, "acc_norm": 0.20207253886010362, "acc_norm_stderr": 0.02897908979429673 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.19230769230769232, "acc_stderr": 0.019982347208637292, "acc_norm": 0.19230769230769232, "acc_norm_stderr": 0.019982347208637292 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.2814814814814815, "acc_stderr": 0.027420019350945277, "acc_norm": 0.2814814814814815, "acc_norm_stderr": 0.027420019350945277 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.19327731092436976, "acc_stderr": 0.025649470265889197, "acc_norm": 0.19327731092436976, "acc_norm_stderr": 0.025649470265889197 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.2913907284768212, "acc_stderr": 0.03710185726119993, "acc_norm": 0.2913907284768212, "acc_norm_stderr": 0.03710185726119993 }, 
"harness|hendrycksTest-high_school_psychology|5": { "acc": 0.23669724770642203, "acc_stderr": 0.018224078117299054, "acc_norm": 0.23669724770642203, "acc_norm_stderr": 0.018224078117299054 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.20833333333333334, "acc_stderr": 0.02769691071309394, "acc_norm": 0.20833333333333334, "acc_norm_stderr": 0.02769691071309394 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.25980392156862747, "acc_stderr": 0.030778554678693264, "acc_norm": 0.25980392156862747, "acc_norm_stderr": 0.030778554678693264 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.2489451476793249, "acc_stderr": 0.028146970599422644, "acc_norm": 0.2489451476793249, "acc_norm_stderr": 0.028146970599422644 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.3004484304932735, "acc_stderr": 0.030769352008229136, "acc_norm": 0.3004484304932735, "acc_norm_stderr": 0.030769352008229136 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.24427480916030533, "acc_stderr": 0.03768335959728742, "acc_norm": 0.24427480916030533, "acc_norm_stderr": 0.03768335959728742 }, "harness|hendrycksTest-international_law|5": { "acc": 0.2644628099173554, "acc_stderr": 0.04026187527591205, "acc_norm": 0.2644628099173554, "acc_norm_stderr": 0.04026187527591205 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.17592592592592593, "acc_stderr": 0.036809181416738807, "acc_norm": 0.17592592592592593, "acc_norm_stderr": 0.036809181416738807 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.2392638036809816, "acc_stderr": 0.033519538795212696, "acc_norm": 0.2392638036809816, "acc_norm_stderr": 0.033519538795212696 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.32142857142857145, "acc_stderr": 0.04432804055291519, "acc_norm": 0.32142857142857145, "acc_norm_stderr": 0.04432804055291519 }, "harness|hendrycksTest-management|5": { "acc": 0.20388349514563106, "acc_stderr": 0.03989139859531773, "acc_norm": 0.20388349514563106, "acc_norm_stderr": 0.03989139859531773 }, "harness|hendrycksTest-marketing|5": { "acc": 0.28205128205128205, "acc_stderr": 0.02948036054954119, "acc_norm": 0.28205128205128205, "acc_norm_stderr": 0.02948036054954119 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.26, "acc_stderr": 0.04408440022768078, "acc_norm": 0.26, "acc_norm_stderr": 0.04408440022768078 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.23754789272030652, "acc_stderr": 0.015218733046150191, "acc_norm": 0.23754789272030652, "acc_norm_stderr": 0.015218733046150191 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.2630057803468208, "acc_stderr": 0.02370309952525817, "acc_norm": 0.2630057803468208, "acc_norm_stderr": 0.02370309952525817 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.23575418994413408, "acc_stderr": 0.014196375686290804, "acc_norm": 0.23575418994413408, "acc_norm_stderr": 0.014196375686290804 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.24183006535947713, "acc_stderr": 0.024518195641879334, "acc_norm": 0.24183006535947713, "acc_norm_stderr": 0.024518195641879334 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.21543408360128619, "acc_stderr": 0.023350225475471425, "acc_norm": 0.21543408360128619, "acc_norm_stderr": 0.023350225475471425 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.25617283950617287, "acc_stderr": 0.0242885336377261, "acc_norm": 0.25617283950617287, "acc_norm_stderr": 0.0242885336377261 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.2553191489361702, "acc_stderr": 
0.02601199293090201, "acc_norm": 0.2553191489361702, "acc_norm_stderr": 0.02601199293090201 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.2470664928292047, "acc_stderr": 0.011015752255279336, "acc_norm": 0.2470664928292047, "acc_norm_stderr": 0.011015752255279336 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.18382352941176472, "acc_stderr": 0.023529242185193106, "acc_norm": 0.18382352941176472, "acc_norm_stderr": 0.023529242185193106 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.2549019607843137, "acc_stderr": 0.017630827375148383, "acc_norm": 0.2549019607843137, "acc_norm_stderr": 0.017630827375148383 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.22727272727272727, "acc_stderr": 0.040139645540727756, "acc_norm": 0.22727272727272727, "acc_norm_stderr": 0.040139645540727756 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.24081632653061225, "acc_stderr": 0.027372942201788163, "acc_norm": 0.24081632653061225, "acc_norm_stderr": 0.027372942201788163 }, "harness|hendrycksTest-sociology|5": { "acc": 0.263681592039801, "acc_stderr": 0.031157150869355568, "acc_norm": 0.263681592039801, "acc_norm_stderr": 0.031157150869355568 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.26, "acc_stderr": 0.0440844002276808, "acc_norm": 0.26, "acc_norm_stderr": 0.0440844002276808 }, "harness|hendrycksTest-virology|5": { "acc": 0.2891566265060241, "acc_stderr": 0.03529486801511115, "acc_norm": 0.2891566265060241, "acc_norm_stderr": 0.03529486801511115 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.28654970760233917, "acc_stderr": 0.03467826685703826, "acc_norm": 0.28654970760233917, "acc_norm_stderr": 0.03467826685703826 }, "harness|truthfulqa:mc|0": { "mc1": 0.23255813953488372, "mc1_stderr": 0.014789157531080508, "mc2": 0.48417001401061344, "mc2_stderr": 0.016564877497923215 }, "harness|winogrande|5": { "acc": 0.526440410418311, "acc_stderr": 0.014032823874407227 }, "harness|gsm8k|5": { "acc": 0.0, "acc_stderr": 0.0 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). 
--> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
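Beyond the winogrande example shown in the card above, the aggregated "results" configuration and the per-task configs can be loaded the same way. A minimal sketch follows (the repository id is the one named in the card; the config names and the "latest" split are those declared in this record's metadata below):

```python
from datasets import load_dataset

repo_id = "open-llm-leaderboard/details_rishiraj__cutie"

# Aggregated metrics of the run, exposed through the "results" configuration.
aggregated = load_dataset(repo_id, "results", split="latest")

# Per-task details, e.g. the ARC-Challenge config declared in the metadata below.
arc_details = load_dataset(repo_id, "harness_arc_challenge_25", split="latest")
```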
open-llm-leaderboard/details_rishiraj__cutie
[ "region:us" ]
2023-12-16T15:08:12+00:00
{"pretty_name": "Evaluation run of rishiraj/cutie", "dataset_summary": "Dataset automatically created during the evaluation run of model [rishiraj/cutie](https://huggingface.co/rishiraj/cutie) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_rishiraj__cutie\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-12-16T15:05:22.803589](https://huggingface.co/datasets/open-llm-leaderboard/details_rishiraj__cutie/blob/main/results_2023-12-16T15-05-22.803589.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.2422854544334347,\n \"acc_stderr\": 0.030350774274944006,\n \"acc_norm\": 0.24263810423714618,\n \"acc_norm_stderr\": 0.03115733731927302,\n \"mc1\": 0.23255813953488372,\n \"mc1_stderr\": 0.014789157531080508,\n \"mc2\": 0.48417001401061344,\n \"mc2_stderr\": 0.016564877497923215\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.22098976109215018,\n \"acc_stderr\": 0.012124929206818258,\n \"acc_norm\": 0.2696245733788396,\n \"acc_norm_stderr\": 0.01296804068686916\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.2561242780322645,\n \"acc_stderr\": 0.004355992090030987,\n \"acc_norm\": 0.27016530571599284,\n \"acc_norm_stderr\": 0.00443137554991136\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.28,\n \"acc_stderr\": 0.04512608598542127,\n \"acc_norm\": 0.28,\n \"acc_norm_stderr\": 0.04512608598542127\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.2222222222222222,\n \"acc_stderr\": 0.03591444084196969,\n \"acc_norm\": 0.2222222222222222,\n \"acc_norm_stderr\": 0.03591444084196969\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.14473684210526316,\n \"acc_stderr\": 0.0286319518459304,\n \"acc_norm\": 0.14473684210526316,\n \"acc_norm_stderr\": 0.0286319518459304\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.27,\n \"acc_stderr\": 0.044619604333847394,\n \"acc_norm\": 0.27,\n \"acc_norm_stderr\": 0.044619604333847394\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.2490566037735849,\n \"acc_stderr\": 0.02661648298050171,\n \"acc_norm\": 0.2490566037735849,\n \"acc_norm_stderr\": 0.02661648298050171\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.2708333333333333,\n \"acc_stderr\": 0.03716177437566015,\n \"acc_norm\": 0.2708333333333333,\n \"acc_norm_stderr\": 0.03716177437566015\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.21,\n \"acc_stderr\": 0.040936018074033256,\n \"acc_norm\": 0.21,\n \"acc_norm_stderr\": 0.040936018074033256\n },\n 
\"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.21,\n \"acc_stderr\": 0.040936018074033256,\n \"acc_norm\": 0.21,\n \"acc_norm_stderr\": 0.040936018074033256\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.26,\n \"acc_stderr\": 0.044084400227680814,\n \"acc_norm\": 0.26,\n \"acc_norm_stderr\": 0.044084400227680814\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.21965317919075145,\n \"acc_stderr\": 0.031568093627031744,\n \"acc_norm\": 0.21965317919075145,\n \"acc_norm_stderr\": 0.031568093627031744\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.27450980392156865,\n \"acc_stderr\": 0.04440521906179325,\n \"acc_norm\": 0.27450980392156865,\n \"acc_norm_stderr\": 0.04440521906179325\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.32,\n \"acc_stderr\": 0.04688261722621504,\n \"acc_norm\": 0.32,\n \"acc_norm_stderr\": 0.04688261722621504\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.2297872340425532,\n \"acc_stderr\": 0.027501752944412428,\n \"acc_norm\": 0.2297872340425532,\n \"acc_norm_stderr\": 0.027501752944412428\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.2631578947368421,\n \"acc_stderr\": 0.04142439719489361,\n \"acc_norm\": 0.2631578947368421,\n \"acc_norm_stderr\": 0.04142439719489361\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.27586206896551724,\n \"acc_stderr\": 0.037245636197746325,\n \"acc_norm\": 0.27586206896551724,\n \"acc_norm_stderr\": 0.037245636197746325\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.2275132275132275,\n \"acc_stderr\": 0.021591269407823795,\n \"acc_norm\": 0.2275132275132275,\n \"acc_norm_stderr\": 0.021591269407823795\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.16666666666666666,\n \"acc_stderr\": 0.03333333333333338,\n \"acc_norm\": 0.16666666666666666,\n \"acc_norm_stderr\": 0.03333333333333338\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.17,\n \"acc_stderr\": 0.0377525168068637,\n \"acc_norm\": 0.17,\n \"acc_norm_stderr\": 0.0377525168068637\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.23548387096774193,\n \"acc_stderr\": 0.02413763242933772,\n \"acc_norm\": 0.23548387096774193,\n \"acc_norm_stderr\": 0.02413763242933772\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.19704433497536947,\n \"acc_stderr\": 0.027986724666736212,\n \"acc_norm\": 0.19704433497536947,\n \"acc_norm_stderr\": 0.027986724666736212\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.22,\n \"acc_stderr\": 0.041633319989322695,\n \"acc_norm\": 0.22,\n \"acc_norm_stderr\": 0.041633319989322695\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.2727272727272727,\n \"acc_stderr\": 0.0347769116216366,\n \"acc_norm\": 0.2727272727272727,\n \"acc_norm_stderr\": 0.0347769116216366\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.2222222222222222,\n \"acc_stderr\": 0.02962022787479049,\n \"acc_norm\": 0.2222222222222222,\n \"acc_norm_stderr\": 0.02962022787479049\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.20207253886010362,\n \"acc_stderr\": 0.02897908979429673,\n \"acc_norm\": 0.20207253886010362,\n \"acc_norm_stderr\": 0.02897908979429673\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.19230769230769232,\n \"acc_stderr\": 0.019982347208637292,\n 
\"acc_norm\": 0.19230769230769232,\n \"acc_norm_stderr\": 0.019982347208637292\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.2814814814814815,\n \"acc_stderr\": 0.027420019350945277,\n \"acc_norm\": 0.2814814814814815,\n \"acc_norm_stderr\": 0.027420019350945277\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.19327731092436976,\n \"acc_stderr\": 0.025649470265889197,\n \"acc_norm\": 0.19327731092436976,\n \"acc_norm_stderr\": 0.025649470265889197\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.2913907284768212,\n \"acc_stderr\": 0.03710185726119993,\n \"acc_norm\": 0.2913907284768212,\n \"acc_norm_stderr\": 0.03710185726119993\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.23669724770642203,\n \"acc_stderr\": 0.018224078117299054,\n \"acc_norm\": 0.23669724770642203,\n \"acc_norm_stderr\": 0.018224078117299054\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.20833333333333334,\n \"acc_stderr\": 0.02769691071309394,\n \"acc_norm\": 0.20833333333333334,\n \"acc_norm_stderr\": 0.02769691071309394\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.25980392156862747,\n \"acc_stderr\": 0.030778554678693264,\n \"acc_norm\": 0.25980392156862747,\n \"acc_norm_stderr\": 0.030778554678693264\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.2489451476793249,\n \"acc_stderr\": 0.028146970599422644,\n \"acc_norm\": 0.2489451476793249,\n \"acc_norm_stderr\": 0.028146970599422644\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.3004484304932735,\n \"acc_stderr\": 0.030769352008229136,\n \"acc_norm\": 0.3004484304932735,\n \"acc_norm_stderr\": 0.030769352008229136\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.24427480916030533,\n \"acc_stderr\": 0.03768335959728742,\n \"acc_norm\": 0.24427480916030533,\n \"acc_norm_stderr\": 0.03768335959728742\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.2644628099173554,\n \"acc_stderr\": 0.04026187527591205,\n \"acc_norm\": 0.2644628099173554,\n \"acc_norm_stderr\": 0.04026187527591205\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.17592592592592593,\n \"acc_stderr\": 0.036809181416738807,\n \"acc_norm\": 0.17592592592592593,\n \"acc_norm_stderr\": 0.036809181416738807\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.2392638036809816,\n \"acc_stderr\": 0.033519538795212696,\n \"acc_norm\": 0.2392638036809816,\n \"acc_norm_stderr\": 0.033519538795212696\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.32142857142857145,\n \"acc_stderr\": 0.04432804055291519,\n \"acc_norm\": 0.32142857142857145,\n \"acc_norm_stderr\": 0.04432804055291519\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.20388349514563106,\n \"acc_stderr\": 0.03989139859531773,\n \"acc_norm\": 0.20388349514563106,\n \"acc_norm_stderr\": 0.03989139859531773\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.28205128205128205,\n \"acc_stderr\": 0.02948036054954119,\n \"acc_norm\": 0.28205128205128205,\n \"acc_norm_stderr\": 0.02948036054954119\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.26,\n \"acc_stderr\": 0.04408440022768078,\n \"acc_norm\": 0.26,\n \"acc_norm_stderr\": 0.04408440022768078\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.23754789272030652,\n \"acc_stderr\": 0.015218733046150191,\n \"acc_norm\": 0.23754789272030652,\n 
\"acc_norm_stderr\": 0.015218733046150191\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.2630057803468208,\n \"acc_stderr\": 0.02370309952525817,\n \"acc_norm\": 0.2630057803468208,\n \"acc_norm_stderr\": 0.02370309952525817\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.23575418994413408,\n \"acc_stderr\": 0.014196375686290804,\n \"acc_norm\": 0.23575418994413408,\n \"acc_norm_stderr\": 0.014196375686290804\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.24183006535947713,\n \"acc_stderr\": 0.024518195641879334,\n \"acc_norm\": 0.24183006535947713,\n \"acc_norm_stderr\": 0.024518195641879334\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.21543408360128619,\n \"acc_stderr\": 0.023350225475471425,\n \"acc_norm\": 0.21543408360128619,\n \"acc_norm_stderr\": 0.023350225475471425\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.25617283950617287,\n \"acc_stderr\": 0.0242885336377261,\n \"acc_norm\": 0.25617283950617287,\n \"acc_norm_stderr\": 0.0242885336377261\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.2553191489361702,\n \"acc_stderr\": 0.02601199293090201,\n \"acc_norm\": 0.2553191489361702,\n \"acc_norm_stderr\": 0.02601199293090201\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.2470664928292047,\n \"acc_stderr\": 0.011015752255279336,\n \"acc_norm\": 0.2470664928292047,\n \"acc_norm_stderr\": 0.011015752255279336\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.18382352941176472,\n \"acc_stderr\": 0.023529242185193106,\n \"acc_norm\": 0.18382352941176472,\n \"acc_norm_stderr\": 0.023529242185193106\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.2549019607843137,\n \"acc_stderr\": 0.017630827375148383,\n \"acc_norm\": 0.2549019607843137,\n \"acc_norm_stderr\": 0.017630827375148383\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.22727272727272727,\n \"acc_stderr\": 0.040139645540727756,\n \"acc_norm\": 0.22727272727272727,\n \"acc_norm_stderr\": 0.040139645540727756\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.24081632653061225,\n \"acc_stderr\": 0.027372942201788163,\n \"acc_norm\": 0.24081632653061225,\n \"acc_norm_stderr\": 0.027372942201788163\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.263681592039801,\n \"acc_stderr\": 0.031157150869355568,\n \"acc_norm\": 0.263681592039801,\n \"acc_norm_stderr\": 0.031157150869355568\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.26,\n \"acc_stderr\": 0.0440844002276808,\n \"acc_norm\": 0.26,\n \"acc_norm_stderr\": 0.0440844002276808\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.2891566265060241,\n \"acc_stderr\": 0.03529486801511115,\n \"acc_norm\": 0.2891566265060241,\n \"acc_norm_stderr\": 0.03529486801511115\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.28654970760233917,\n \"acc_stderr\": 0.03467826685703826,\n \"acc_norm\": 0.28654970760233917,\n \"acc_norm_stderr\": 0.03467826685703826\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.23255813953488372,\n \"mc1_stderr\": 0.014789157531080508,\n \"mc2\": 0.48417001401061344,\n \"mc2_stderr\": 0.016564877497923215\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.526440410418311,\n \"acc_stderr\": 0.014032823874407227\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.0,\n \"acc_stderr\": 0.0\n }\n}\n```", "repo_url": "https://huggingface.co/rishiraj/cutie", "leaderboard_url": 
"https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_12_16T15_05_22.803589", "path": ["**/details_harness|arc:challenge|25_2023-12-16T15-05-22.803589.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-12-16T15-05-22.803589.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_12_16T15_05_22.803589", "path": ["**/details_harness|gsm8k|5_2023-12-16T15-05-22.803589.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-12-16T15-05-22.803589.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_12_16T15_05_22.803589", "path": ["**/details_harness|hellaswag|10_2023-12-16T15-05-22.803589.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-12-16T15-05-22.803589.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_12_16T15_05_22.803589", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T15-05-22.803589.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-12-16T15-05-22.803589.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-12-16T15-05-22.803589.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T15-05-22.803589.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T15-05-22.803589.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-12-16T15-05-22.803589.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T15-05-22.803589.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T15-05-22.803589.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T15-05-22.803589.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T15-05-22.803589.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-12-16T15-05-22.803589.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-12-16T15-05-22.803589.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T15-05-22.803589.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-12-16T15-05-22.803589.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T15-05-22.803589.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T15-05-22.803589.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T15-05-22.803589.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-12-16T15-05-22.803589.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T15-05-22.803589.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T15-05-22.803589.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T15-05-22.803589.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T15-05-22.803589.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T15-05-22.803589.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T15-05-22.803589.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T15-05-22.803589.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T15-05-22.803589.parquet", 
"**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T15-05-22.803589.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T15-05-22.803589.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T15-05-22.803589.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T15-05-22.803589.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T15-05-22.803589.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T15-05-22.803589.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-12-16T15-05-22.803589.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T15-05-22.803589.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-12-16T15-05-22.803589.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T15-05-22.803589.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T15-05-22.803589.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T15-05-22.803589.parquet", "**/details_harness|hendrycksTest-management|5_2023-12-16T15-05-22.803589.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-12-16T15-05-22.803589.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T15-05-22.803589.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T15-05-22.803589.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T15-05-22.803589.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T15-05-22.803589.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-12-16T15-05-22.803589.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-12-16T15-05-22.803589.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-12-16T15-05-22.803589.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T15-05-22.803589.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-12-16T15-05-22.803589.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T15-05-22.803589.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T15-05-22.803589.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-12-16T15-05-22.803589.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-12-16T15-05-22.803589.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-12-16T15-05-22.803589.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T15-05-22.803589.parquet", "**/details_harness|hendrycksTest-virology|5_2023-12-16T15-05-22.803589.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-12-16T15-05-22.803589.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T15-05-22.803589.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-12-16T15-05-22.803589.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-12-16T15-05-22.803589.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T15-05-22.803589.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T15-05-22.803589.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-12-16T15-05-22.803589.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T15-05-22.803589.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T15-05-22.803589.parquet", 
"**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T15-05-22.803589.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T15-05-22.803589.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-12-16T15-05-22.803589.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-12-16T15-05-22.803589.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T15-05-22.803589.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-12-16T15-05-22.803589.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T15-05-22.803589.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T15-05-22.803589.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T15-05-22.803589.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-12-16T15-05-22.803589.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T15-05-22.803589.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T15-05-22.803589.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T15-05-22.803589.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T15-05-22.803589.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T15-05-22.803589.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T15-05-22.803589.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T15-05-22.803589.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T15-05-22.803589.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T15-05-22.803589.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T15-05-22.803589.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T15-05-22.803589.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T15-05-22.803589.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T15-05-22.803589.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T15-05-22.803589.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-12-16T15-05-22.803589.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T15-05-22.803589.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-12-16T15-05-22.803589.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T15-05-22.803589.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T15-05-22.803589.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T15-05-22.803589.parquet", "**/details_harness|hendrycksTest-management|5_2023-12-16T15-05-22.803589.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-12-16T15-05-22.803589.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T15-05-22.803589.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T15-05-22.803589.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T15-05-22.803589.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T15-05-22.803589.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-12-16T15-05-22.803589.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-12-16T15-05-22.803589.parquet", 
"**/details_harness|hendrycksTest-prehistory|5_2023-12-16T15-05-22.803589.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T15-05-22.803589.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-12-16T15-05-22.803589.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T15-05-22.803589.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T15-05-22.803589.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-12-16T15-05-22.803589.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-12-16T15-05-22.803589.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-12-16T15-05-22.803589.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T15-05-22.803589.parquet", "**/details_harness|hendrycksTest-virology|5_2023-12-16T15-05-22.803589.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-12-16T15-05-22.803589.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_12_16T15_05_22.803589", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T15-05-22.803589.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T15-05-22.803589.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_12_16T15_05_22.803589", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-12-16T15-05-22.803589.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-12-16T15-05-22.803589.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_12_16T15_05_22.803589", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-12-16T15-05-22.803589.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-12-16T15-05-22.803589.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_12_16T15_05_22.803589", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T15-05-22.803589.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T15-05-22.803589.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_12_16T15_05_22.803589", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T15-05-22.803589.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T15-05-22.803589.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_12_16T15_05_22.803589", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-12-16T15-05-22.803589.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-12-16T15-05-22.803589.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_12_16T15_05_22.803589", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T15-05-22.803589.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T15-05-22.803589.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_12_16T15_05_22.803589", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T15-05-22.803589.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T15-05-22.803589.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_12_16T15_05_22.803589", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T15-05-22.803589.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T15-05-22.803589.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_12_16T15_05_22.803589", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T15-05-22.803589.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T15-05-22.803589.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_12_16T15_05_22.803589", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-12-16T15-05-22.803589.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-12-16T15-05-22.803589.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_12_16T15_05_22.803589", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-12-16T15-05-22.803589.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-12-16T15-05-22.803589.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_12_16T15_05_22.803589", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T15-05-22.803589.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T15-05-22.803589.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_12_16T15_05_22.803589", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-12-16T15-05-22.803589.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-12-16T15-05-22.803589.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_12_16T15_05_22.803589", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T15-05-22.803589.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T15-05-22.803589.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_12_16T15_05_22.803589", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T15-05-22.803589.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T15-05-22.803589.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_12_16T15_05_22.803589", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T15-05-22.803589.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T15-05-22.803589.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_12_16T15_05_22.803589", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-12-16T15-05-22.803589.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-12-16T15-05-22.803589.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_12_16T15_05_22.803589", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T15-05-22.803589.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T15-05-22.803589.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_12_16T15_05_22.803589", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T15-05-22.803589.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T15-05-22.803589.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_12_16T15_05_22.803589", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T15-05-22.803589.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T15-05-22.803589.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_12_16T15_05_22.803589", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T15-05-22.803589.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T15-05-22.803589.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_12_16T15_05_22.803589", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T15-05-22.803589.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T15-05-22.803589.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_12_16T15_05_22.803589", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T15-05-22.803589.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T15-05-22.803589.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_12_16T15_05_22.803589", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T15-05-22.803589.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T15-05-22.803589.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_12_16T15_05_22.803589", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T15-05-22.803589.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T15-05-22.803589.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_12_16T15_05_22.803589", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T15-05-22.803589.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T15-05-22.803589.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_12_16T15_05_22.803589", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T15-05-22.803589.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T15-05-22.803589.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_12_16T15_05_22.803589", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T15-05-22.803589.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T15-05-22.803589.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_12_16T15_05_22.803589", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T15-05-22.803589.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T15-05-22.803589.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_12_16T15_05_22.803589", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T15-05-22.803589.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T15-05-22.803589.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_12_16T15_05_22.803589", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T15-05-22.803589.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T15-05-22.803589.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_12_16T15_05_22.803589", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-12-16T15-05-22.803589.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-12-16T15-05-22.803589.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_12_16T15_05_22.803589", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T15-05-22.803589.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T15-05-22.803589.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_12_16T15_05_22.803589", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-12-16T15-05-22.803589.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-12-16T15-05-22.803589.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_12_16T15_05_22.803589", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T15-05-22.803589.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T15-05-22.803589.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_12_16T15_05_22.803589", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T15-05-22.803589.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T15-05-22.803589.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_12_16T15_05_22.803589", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T15-05-22.803589.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T15-05-22.803589.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_12_16T15_05_22.803589", "path": ["**/details_harness|hendrycksTest-management|5_2023-12-16T15-05-22.803589.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-12-16T15-05-22.803589.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_12_16T15_05_22.803589", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-12-16T15-05-22.803589.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-12-16T15-05-22.803589.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_12_16T15_05_22.803589", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T15-05-22.803589.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T15-05-22.803589.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_12_16T15_05_22.803589", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T15-05-22.803589.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T15-05-22.803589.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_12_16T15_05_22.803589", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T15-05-22.803589.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T15-05-22.803589.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_12_16T15_05_22.803589", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T15-05-22.803589.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T15-05-22.803589.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_12_16T15_05_22.803589", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-12-16T15-05-22.803589.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-12-16T15-05-22.803589.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_12_16T15_05_22.803589", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-12-16T15-05-22.803589.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-12-16T15-05-22.803589.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_12_16T15_05_22.803589", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-12-16T15-05-22.803589.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-12-16T15-05-22.803589.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_12_16T15_05_22.803589", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T15-05-22.803589.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T15-05-22.803589.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_12_16T15_05_22.803589", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-12-16T15-05-22.803589.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2023-12-16T15-05-22.803589.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_12_16T15_05_22.803589", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T15-05-22.803589.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T15-05-22.803589.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_12_16T15_05_22.803589", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T15-05-22.803589.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T15-05-22.803589.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_12_16T15_05_22.803589", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-12-16T15-05-22.803589.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-12-16T15-05-22.803589.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_12_16T15_05_22.803589", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-12-16T15-05-22.803589.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-12-16T15-05-22.803589.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_12_16T15_05_22.803589", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-12-16T15-05-22.803589.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-12-16T15-05-22.803589.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_12_16T15_05_22.803589", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T15-05-22.803589.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T15-05-22.803589.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_12_16T15_05_22.803589", "path": ["**/details_harness|hendrycksTest-virology|5_2023-12-16T15-05-22.803589.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-12-16T15-05-22.803589.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_12_16T15_05_22.803589", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-12-16T15-05-22.803589.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-12-16T15-05-22.803589.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_12_16T15_05_22.803589", "path": ["**/details_harness|truthfulqa:mc|0_2023-12-16T15-05-22.803589.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-12-16T15-05-22.803589.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_12_16T15_05_22.803589", "path": ["**/details_harness|winogrande|5_2023-12-16T15-05-22.803589.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-12-16T15-05-22.803589.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_12_16T15_05_22.803589", "path": ["results_2023-12-16T15-05-22.803589.parquet"]}, {"split": "latest", "path": 
["results_2023-12-16T15-05-22.803589.parquet"]}]}]}
2023-12-16T15:08:54+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of rishiraj/cutie Dataset automatically created during the evaluation run of model rishiraj/cutie on the Open LLM Leaderboard. The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-12-16T15:05:22.803589(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
[ "# Dataset Card for Evaluation run of rishiraj/cutie\n\n\n\nDataset automatically created during the evaluation run of model rishiraj/cutie on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-12-16T15:05:22.803589(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of rishiraj/cutie\n\n\n\nDataset automatically created during the evaluation run of model rishiraj/cutie on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-12-16T15:05:22.803589(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 173, 67, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of rishiraj/cutie\n\n\n\nDataset automatically created during the evaluation run of model rishiraj/cutie on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-12-16T15:05:22.803589(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]## Dataset Card Authors [optional]## Dataset Card Contact" ]
57775dfec69125d1fe96dd27490ed9b50a1e8c6c
# Dataset Card for Evaluation run of Toten5/LeoScorpius-GreenNode-7B-v1 <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [Toten5/LeoScorpius-GreenNode-7B-v1](https://huggingface.co/Toten5/LeoScorpius-GreenNode-7B-v1) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_Toten5__LeoScorpius-GreenNode-7B-v1", "harness_winogrande_5", split="train") ``` (A short sketch showing how the aggregated "results" configuration can be loaded follows at the end of this card.) ## Latest results These are the [latest results from run 2023-12-16T15:05:24.026607](https://huggingface.co/datasets/open-llm-leaderboard/details_Toten5__LeoScorpius-GreenNode-7B-v1/blob/main/results_2023-12-16T15-05-24.026607.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.6581745324655823, "acc_stderr": 0.031959438135900374, "acc_norm": 0.6578427283037575, "acc_norm_stderr": 0.03262138237063813, "mc1": 0.5532435740514076, "mc1_stderr": 0.017403977522557148, "mc2": 0.6940717765129405, "mc2_stderr": 0.015012098387980712 }, "harness|arc:challenge|25": { "acc": 0.6936860068259386, "acc_stderr": 0.013470584417276513, "acc_norm": 0.7209897610921502, "acc_norm_stderr": 0.013106784883601329 }, "harness|hellaswag|10": { "acc": 0.709520015933081, "acc_stderr": 0.004530560646902538, "acc_norm": 0.8813981278629756, "acc_norm_stderr": 0.0032265867834212927 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.33, "acc_stderr": 0.04725815626252605, "acc_norm": 0.33, "acc_norm_stderr": 0.04725815626252605 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.6518518518518519, "acc_stderr": 0.041153246103369526, "acc_norm": 0.6518518518518519, "acc_norm_stderr": 0.041153246103369526 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.6973684210526315, "acc_stderr": 0.037385206761196686, "acc_norm": 0.6973684210526315, "acc_norm_stderr": 0.037385206761196686 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.62, "acc_stderr": 0.048783173121456316, "acc_norm": 0.62, "acc_norm_stderr": 0.048783173121456316 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.7320754716981132, "acc_stderr": 0.027257260322494845, "acc_norm": 0.7320754716981132, "acc_norm_stderr": 0.027257260322494845 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.7777777777777778, "acc_stderr": 0.03476590104304134, "acc_norm": 0.7777777777777778, "acc_norm_stderr": 0.03476590104304134 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.48, "acc_stderr": 0.050211673156867795, "acc_norm": 0.48, "acc_norm_stderr": 0.050211673156867795 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.56, "acc_stderr": 0.049888765156985884, "acc_norm": 0.56, "acc_norm_stderr": 
0.049888765156985884 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.6878612716763006, "acc_stderr": 0.035331333893236574, "acc_norm": 0.6878612716763006, "acc_norm_stderr": 0.035331333893236574 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.46078431372549017, "acc_stderr": 0.04959859966384181, "acc_norm": 0.46078431372549017, "acc_norm_stderr": 0.04959859966384181 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.77, "acc_stderr": 0.042295258468165065, "acc_norm": 0.77, "acc_norm_stderr": 0.042295258468165065 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.574468085106383, "acc_stderr": 0.03232146916224468, "acc_norm": 0.574468085106383, "acc_norm_stderr": 0.03232146916224468 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.5, "acc_stderr": 0.047036043419179864, "acc_norm": 0.5, "acc_norm_stderr": 0.047036043419179864 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5724137931034483, "acc_stderr": 0.04122737111370332, "acc_norm": 0.5724137931034483, "acc_norm_stderr": 0.04122737111370332 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.41798941798941797, "acc_stderr": 0.025402555503260912, "acc_norm": 0.41798941798941797, "acc_norm_stderr": 0.025402555503260912 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.4603174603174603, "acc_stderr": 0.04458029125470973, "acc_norm": 0.4603174603174603, "acc_norm_stderr": 0.04458029125470973 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.35, "acc_stderr": 0.04793724854411019, "acc_norm": 0.35, "acc_norm_stderr": 0.04793724854411019 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.7774193548387097, "acc_stderr": 0.023664216671642518, "acc_norm": 0.7774193548387097, "acc_norm_stderr": 0.023664216671642518 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.5270935960591133, "acc_stderr": 0.03512819077876106, "acc_norm": 0.5270935960591133, "acc_norm_stderr": 0.03512819077876106 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.71, "acc_stderr": 0.045604802157206845, "acc_norm": 0.71, "acc_norm_stderr": 0.045604802157206845 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.7757575757575758, "acc_stderr": 0.03256866661681102, "acc_norm": 0.7757575757575758, "acc_norm_stderr": 0.03256866661681102 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.7929292929292929, "acc_stderr": 0.028869778460267042, "acc_norm": 0.7929292929292929, "acc_norm_stderr": 0.028869778460267042 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.8963730569948186, "acc_stderr": 0.02199531196364424, "acc_norm": 0.8963730569948186, "acc_norm_stderr": 0.02199531196364424 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.6666666666666666, "acc_stderr": 0.023901157979402538, "acc_norm": 0.6666666666666666, "acc_norm_stderr": 0.023901157979402538 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.35185185185185186, "acc_stderr": 0.029116617606083008, "acc_norm": 0.35185185185185186, "acc_norm_stderr": 0.029116617606083008 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.6764705882352942, "acc_stderr": 0.03038835355188679, "acc_norm": 0.6764705882352942, "acc_norm_stderr": 0.03038835355188679 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.3443708609271523, "acc_stderr": 
0.038796870240733264, "acc_norm": 0.3443708609271523, "acc_norm_stderr": 0.038796870240733264 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8532110091743119, "acc_stderr": 0.01517314184512625, "acc_norm": 0.8532110091743119, "acc_norm_stderr": 0.01517314184512625 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.5324074074074074, "acc_stderr": 0.03402801581358966, "acc_norm": 0.5324074074074074, "acc_norm_stderr": 0.03402801581358966 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.8382352941176471, "acc_stderr": 0.025845017986926917, "acc_norm": 0.8382352941176471, "acc_norm_stderr": 0.025845017986926917 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.8016877637130801, "acc_stderr": 0.025955020841621112, "acc_norm": 0.8016877637130801, "acc_norm_stderr": 0.025955020841621112 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.695067264573991, "acc_stderr": 0.030898610882477515, "acc_norm": 0.695067264573991, "acc_norm_stderr": 0.030898610882477515 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.8244274809160306, "acc_stderr": 0.03336820338476074, "acc_norm": 0.8244274809160306, "acc_norm_stderr": 0.03336820338476074 }, "harness|hendrycksTest-international_law|5": { "acc": 0.8016528925619835, "acc_stderr": 0.03640118271990947, "acc_norm": 0.8016528925619835, "acc_norm_stderr": 0.03640118271990947 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.7962962962962963, "acc_stderr": 0.03893542518824847, "acc_norm": 0.7962962962962963, "acc_norm_stderr": 0.03893542518824847 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.7791411042944786, "acc_stderr": 0.03259177392742179, "acc_norm": 0.7791411042944786, "acc_norm_stderr": 0.03259177392742179 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.42857142857142855, "acc_stderr": 0.04697113923010212, "acc_norm": 0.42857142857142855, "acc_norm_stderr": 0.04697113923010212 }, "harness|hendrycksTest-management|5": { "acc": 0.7669902912621359, "acc_stderr": 0.04185832598928315, "acc_norm": 0.7669902912621359, "acc_norm_stderr": 0.04185832598928315 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8717948717948718, "acc_stderr": 0.02190190511507333, "acc_norm": 0.8717948717948718, "acc_norm_stderr": 0.02190190511507333 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.71, "acc_stderr": 0.045604802157206845, "acc_norm": 0.71, "acc_norm_stderr": 0.045604802157206845 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.8365261813537676, "acc_stderr": 0.013223928616741617, "acc_norm": 0.8365261813537676, "acc_norm_stderr": 0.013223928616741617 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.7456647398843931, "acc_stderr": 0.02344582627654554, "acc_norm": 0.7456647398843931, "acc_norm_stderr": 0.02344582627654554 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.47262569832402235, "acc_stderr": 0.016697420650642752, "acc_norm": 0.47262569832402235, "acc_norm_stderr": 0.016697420650642752 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.7287581699346405, "acc_stderr": 0.02545775669666788, "acc_norm": 0.7287581699346405, "acc_norm_stderr": 0.02545775669666788 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.7202572347266881, "acc_stderr": 0.025494259350694912, "acc_norm": 0.7202572347266881, "acc_norm_stderr": 0.025494259350694912 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.7438271604938271, "acc_stderr": 0.0242885336377261, "acc_norm": 0.7438271604938271, "acc_norm_stderr": 0.0242885336377261 }, 
"harness|hendrycksTest-professional_accounting|5": { "acc": 0.48936170212765956, "acc_stderr": 0.029820747191422473, "acc_norm": 0.48936170212765956, "acc_norm_stderr": 0.029820747191422473 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.4726205997392438, "acc_stderr": 0.012751075788015058, "acc_norm": 0.4726205997392438, "acc_norm_stderr": 0.012751075788015058 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.6875, "acc_stderr": 0.02815637344037142, "acc_norm": 0.6875, "acc_norm_stderr": 0.02815637344037142 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.6699346405228758, "acc_stderr": 0.019023726160724553, "acc_norm": 0.6699346405228758, "acc_norm_stderr": 0.019023726160724553 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6909090909090909, "acc_stderr": 0.044262946482000985, "acc_norm": 0.6909090909090909, "acc_norm_stderr": 0.044262946482000985 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.7224489795918367, "acc_stderr": 0.028666857790274648, "acc_norm": 0.7224489795918367, "acc_norm_stderr": 0.028666857790274648 }, "harness|hendrycksTest-sociology|5": { "acc": 0.835820895522388, "acc_stderr": 0.026193923544454115, "acc_norm": 0.835820895522388, "acc_norm_stderr": 0.026193923544454115 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.86, "acc_stderr": 0.0348735088019777, "acc_norm": 0.86, "acc_norm_stderr": 0.0348735088019777 }, "harness|hendrycksTest-virology|5": { "acc": 0.5481927710843374, "acc_stderr": 0.03874371556587953, "acc_norm": 0.5481927710843374, "acc_norm_stderr": 0.03874371556587953 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8245614035087719, "acc_stderr": 0.029170885500727665, "acc_norm": 0.8245614035087719, "acc_norm_stderr": 0.029170885500727665 }, "harness|truthfulqa:mc|0": { "mc1": 0.5532435740514076, "mc1_stderr": 0.017403977522557148, "mc2": 0.6940717765129405, "mc2_stderr": 0.015012098387980712 }, "harness|winogrande|5": { "acc": 0.8232044198895028, "acc_stderr": 0.01072192328791875 }, "harness|gsm8k|5": { "acc": 0.7119029567854435, "acc_stderr": 0.01247446973719792 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). 
--> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
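The card above shows how to load the per-task details for one configuration. As a complementary illustration, here is a minimal sketch of how the aggregated "results" configuration and a task's "latest" split could be loaded. It assumes this dataset follows the same configuration layout as the other leaderboard detail datasets in this collection (a "results" config plus per-task configs such as "harness_gsm8k_5", each exposing a "latest" split); those names are inferred from the run metadata shown here rather than verified against this specific repository.

```python
from datasets import load_dataset

# Aggregated metrics for the whole run (assumed config name: "results",
# assumed split name: "latest", mirroring the other detail datasets).
results = load_dataset(
    "open-llm-leaderboard/details_Toten5__LeoScorpius-GreenNode-7B-v1",
    "results",
    split="latest",
)

# Per-task details for the 5-shot GSM8K evaluation, pinned to the most
# recent upload instead of a timestamped split.
gsm8k_details = load_dataset(
    "open-llm-leaderboard/details_Toten5__LeoScorpius-GreenNode-7B-v1",
    "harness_gsm8k_5",
    split="latest",
)

print(results)
print(gsm8k_details)
```

If an exact upload needs to be pinned instead of "latest", the timestamped split name can be used; following the naming pattern of the other runs, it would presumably be "2023_12_16T15_05_24.026607" for this run.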
open-llm-leaderboard/details_Toten5__LeoScorpius-GreenNode-7B-v1
[ "region:us" ]
2023-12-16T15:08:13+00:00
{"pretty_name": "Evaluation run of Toten5/LeoScorpius-GreenNode-7B-v1", "dataset_summary": "Dataset automatically created during the evaluation run of model [Toten5/LeoScorpius-GreenNode-7B-v1](https://huggingface.co/Toten5/LeoScorpius-GreenNode-7B-v1) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_Toten5__LeoScorpius-GreenNode-7B-v1\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-12-16T15:05:24.026607](https://huggingface.co/datasets/open-llm-leaderboard/details_Toten5__LeoScorpius-GreenNode-7B-v1/blob/main/results_2023-12-16T15-05-24.026607.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6581745324655823,\n \"acc_stderr\": 0.031959438135900374,\n \"acc_norm\": 0.6578427283037575,\n \"acc_norm_stderr\": 0.03262138237063813,\n \"mc1\": 0.5532435740514076,\n \"mc1_stderr\": 0.017403977522557148,\n \"mc2\": 0.6940717765129405,\n \"mc2_stderr\": 0.015012098387980712\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.6936860068259386,\n \"acc_stderr\": 0.013470584417276513,\n \"acc_norm\": 0.7209897610921502,\n \"acc_norm_stderr\": 0.013106784883601329\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.709520015933081,\n \"acc_stderr\": 0.004530560646902538,\n \"acc_norm\": 0.8813981278629756,\n \"acc_norm_stderr\": 0.0032265867834212927\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.33,\n \"acc_stderr\": 0.04725815626252605,\n \"acc_norm\": 0.33,\n \"acc_norm_stderr\": 0.04725815626252605\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6518518518518519,\n \"acc_stderr\": 0.041153246103369526,\n \"acc_norm\": 0.6518518518518519,\n \"acc_norm_stderr\": 0.041153246103369526\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.6973684210526315,\n \"acc_stderr\": 0.037385206761196686,\n \"acc_norm\": 0.6973684210526315,\n \"acc_norm_stderr\": 0.037385206761196686\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.62,\n \"acc_stderr\": 0.048783173121456316,\n \"acc_norm\": 0.62,\n \"acc_norm_stderr\": 0.048783173121456316\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.7320754716981132,\n \"acc_stderr\": 0.027257260322494845,\n \"acc_norm\": 0.7320754716981132,\n \"acc_norm_stderr\": 0.027257260322494845\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7777777777777778,\n \"acc_stderr\": 0.03476590104304134,\n \"acc_norm\": 0.7777777777777778,\n \"acc_norm_stderr\": 0.03476590104304134\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.48,\n \"acc_stderr\": 
0.050211673156867795,\n \"acc_norm\": 0.48,\n \"acc_norm_stderr\": 0.050211673156867795\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.56,\n \"acc_stderr\": 0.049888765156985884,\n \"acc_norm\": 0.56,\n \"acc_norm_stderr\": 0.049888765156985884\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.3,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.3,\n \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6878612716763006,\n \"acc_stderr\": 0.035331333893236574,\n \"acc_norm\": 0.6878612716763006,\n \"acc_norm_stderr\": 0.035331333893236574\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.46078431372549017,\n \"acc_stderr\": 0.04959859966384181,\n \"acc_norm\": 0.46078431372549017,\n \"acc_norm_stderr\": 0.04959859966384181\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.77,\n \"acc_stderr\": 0.042295258468165065,\n \"acc_norm\": 0.77,\n \"acc_norm_stderr\": 0.042295258468165065\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.574468085106383,\n \"acc_stderr\": 0.03232146916224468,\n \"acc_norm\": 0.574468085106383,\n \"acc_norm_stderr\": 0.03232146916224468\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.5,\n \"acc_stderr\": 0.047036043419179864,\n \"acc_norm\": 0.5,\n \"acc_norm_stderr\": 0.047036043419179864\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5724137931034483,\n \"acc_stderr\": 0.04122737111370332,\n \"acc_norm\": 0.5724137931034483,\n \"acc_norm_stderr\": 0.04122737111370332\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.41798941798941797,\n \"acc_stderr\": 0.025402555503260912,\n \"acc_norm\": 0.41798941798941797,\n \"acc_norm_stderr\": 0.025402555503260912\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.4603174603174603,\n \"acc_stderr\": 0.04458029125470973,\n \"acc_norm\": 0.4603174603174603,\n \"acc_norm_stderr\": 0.04458029125470973\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.35,\n \"acc_stderr\": 0.04793724854411019,\n \"acc_norm\": 0.35,\n \"acc_norm_stderr\": 0.04793724854411019\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7774193548387097,\n \"acc_stderr\": 0.023664216671642518,\n \"acc_norm\": 0.7774193548387097,\n \"acc_norm_stderr\": 0.023664216671642518\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.5270935960591133,\n \"acc_stderr\": 0.03512819077876106,\n \"acc_norm\": 0.5270935960591133,\n \"acc_norm_stderr\": 0.03512819077876106\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.71,\n \"acc_stderr\": 0.045604802157206845,\n \"acc_norm\": 0.71,\n \"acc_norm_stderr\": 0.045604802157206845\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7757575757575758,\n \"acc_stderr\": 0.03256866661681102,\n \"acc_norm\": 0.7757575757575758,\n \"acc_norm_stderr\": 0.03256866661681102\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.7929292929292929,\n \"acc_stderr\": 0.028869778460267042,\n \"acc_norm\": 0.7929292929292929,\n \"acc_norm_stderr\": 0.028869778460267042\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.8963730569948186,\n \"acc_stderr\": 0.02199531196364424,\n \"acc_norm\": 0.8963730569948186,\n \"acc_norm_stderr\": 0.02199531196364424\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 
0.6666666666666666,\n \"acc_stderr\": 0.023901157979402538,\n \"acc_norm\": 0.6666666666666666,\n \"acc_norm_stderr\": 0.023901157979402538\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.35185185185185186,\n \"acc_stderr\": 0.029116617606083008,\n \"acc_norm\": 0.35185185185185186,\n \"acc_norm_stderr\": 0.029116617606083008\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.6764705882352942,\n \"acc_stderr\": 0.03038835355188679,\n \"acc_norm\": 0.6764705882352942,\n \"acc_norm_stderr\": 0.03038835355188679\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.3443708609271523,\n \"acc_stderr\": 0.038796870240733264,\n \"acc_norm\": 0.3443708609271523,\n \"acc_norm_stderr\": 0.038796870240733264\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8532110091743119,\n \"acc_stderr\": 0.01517314184512625,\n \"acc_norm\": 0.8532110091743119,\n \"acc_norm_stderr\": 0.01517314184512625\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.5324074074074074,\n \"acc_stderr\": 0.03402801581358966,\n \"acc_norm\": 0.5324074074074074,\n \"acc_norm_stderr\": 0.03402801581358966\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.8382352941176471,\n \"acc_stderr\": 0.025845017986926917,\n \"acc_norm\": 0.8382352941176471,\n \"acc_norm_stderr\": 0.025845017986926917\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.8016877637130801,\n \"acc_stderr\": 0.025955020841621112,\n \"acc_norm\": 0.8016877637130801,\n \"acc_norm_stderr\": 0.025955020841621112\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.695067264573991,\n \"acc_stderr\": 0.030898610882477515,\n \"acc_norm\": 0.695067264573991,\n \"acc_norm_stderr\": 0.030898610882477515\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.8244274809160306,\n \"acc_stderr\": 0.03336820338476074,\n \"acc_norm\": 0.8244274809160306,\n \"acc_norm_stderr\": 0.03336820338476074\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.8016528925619835,\n \"acc_stderr\": 0.03640118271990947,\n \"acc_norm\": 0.8016528925619835,\n \"acc_norm_stderr\": 0.03640118271990947\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7962962962962963,\n \"acc_stderr\": 0.03893542518824847,\n \"acc_norm\": 0.7962962962962963,\n \"acc_norm_stderr\": 0.03893542518824847\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7791411042944786,\n \"acc_stderr\": 0.03259177392742179,\n \"acc_norm\": 0.7791411042944786,\n \"acc_norm_stderr\": 0.03259177392742179\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.42857142857142855,\n \"acc_stderr\": 0.04697113923010212,\n \"acc_norm\": 0.42857142857142855,\n \"acc_norm_stderr\": 0.04697113923010212\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7669902912621359,\n \"acc_stderr\": 0.04185832598928315,\n \"acc_norm\": 0.7669902912621359,\n \"acc_norm_stderr\": 0.04185832598928315\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8717948717948718,\n \"acc_stderr\": 0.02190190511507333,\n \"acc_norm\": 0.8717948717948718,\n \"acc_norm_stderr\": 0.02190190511507333\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.71,\n \"acc_stderr\": 0.045604802157206845,\n \"acc_norm\": 0.71,\n \"acc_norm_stderr\": 0.045604802157206845\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.8365261813537676,\n \"acc_stderr\": 0.013223928616741617,\n 
\"acc_norm\": 0.8365261813537676,\n \"acc_norm_stderr\": 0.013223928616741617\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.7456647398843931,\n \"acc_stderr\": 0.02344582627654554,\n \"acc_norm\": 0.7456647398843931,\n \"acc_norm_stderr\": 0.02344582627654554\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.47262569832402235,\n \"acc_stderr\": 0.016697420650642752,\n \"acc_norm\": 0.47262569832402235,\n \"acc_norm_stderr\": 0.016697420650642752\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.7287581699346405,\n \"acc_stderr\": 0.02545775669666788,\n \"acc_norm\": 0.7287581699346405,\n \"acc_norm_stderr\": 0.02545775669666788\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.7202572347266881,\n \"acc_stderr\": 0.025494259350694912,\n \"acc_norm\": 0.7202572347266881,\n \"acc_norm_stderr\": 0.025494259350694912\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.7438271604938271,\n \"acc_stderr\": 0.0242885336377261,\n \"acc_norm\": 0.7438271604938271,\n \"acc_norm_stderr\": 0.0242885336377261\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.48936170212765956,\n \"acc_stderr\": 0.029820747191422473,\n \"acc_norm\": 0.48936170212765956,\n \"acc_norm_stderr\": 0.029820747191422473\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.4726205997392438,\n \"acc_stderr\": 0.012751075788015058,\n \"acc_norm\": 0.4726205997392438,\n \"acc_norm_stderr\": 0.012751075788015058\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.6875,\n \"acc_stderr\": 0.02815637344037142,\n \"acc_norm\": 0.6875,\n \"acc_norm_stderr\": 0.02815637344037142\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6699346405228758,\n \"acc_stderr\": 0.019023726160724553,\n \"acc_norm\": 0.6699346405228758,\n \"acc_norm_stderr\": 0.019023726160724553\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6909090909090909,\n \"acc_stderr\": 0.044262946482000985,\n \"acc_norm\": 0.6909090909090909,\n \"acc_norm_stderr\": 0.044262946482000985\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.7224489795918367,\n \"acc_stderr\": 0.028666857790274648,\n \"acc_norm\": 0.7224489795918367,\n \"acc_norm_stderr\": 0.028666857790274648\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.835820895522388,\n \"acc_stderr\": 0.026193923544454115,\n \"acc_norm\": 0.835820895522388,\n \"acc_norm_stderr\": 0.026193923544454115\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.86,\n \"acc_stderr\": 0.0348735088019777,\n \"acc_norm\": 0.86,\n \"acc_norm_stderr\": 0.0348735088019777\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5481927710843374,\n \"acc_stderr\": 0.03874371556587953,\n \"acc_norm\": 0.5481927710843374,\n \"acc_norm_stderr\": 0.03874371556587953\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8245614035087719,\n \"acc_stderr\": 0.029170885500727665,\n \"acc_norm\": 0.8245614035087719,\n \"acc_norm_stderr\": 0.029170885500727665\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.5532435740514076,\n \"mc1_stderr\": 0.017403977522557148,\n \"mc2\": 0.6940717765129405,\n \"mc2_stderr\": 0.015012098387980712\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.8232044198895028,\n \"acc_stderr\": 0.01072192328791875\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.7119029567854435,\n \"acc_stderr\": 0.01247446973719792\n }\n}\n```", "repo_url": "https://huggingface.co/Toten5/LeoScorpius-GreenNode-7B-v1", 
"leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_12_16T15_05_24.026607", "path": ["**/details_harness|arc:challenge|25_2023-12-16T15-05-24.026607.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-12-16T15-05-24.026607.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_12_16T15_05_24.026607", "path": ["**/details_harness|gsm8k|5_2023-12-16T15-05-24.026607.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-12-16T15-05-24.026607.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_12_16T15_05_24.026607", "path": ["**/details_harness|hellaswag|10_2023-12-16T15-05-24.026607.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-12-16T15-05-24.026607.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_12_16T15_05_24.026607", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T15-05-24.026607.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-12-16T15-05-24.026607.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-12-16T15-05-24.026607.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T15-05-24.026607.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T15-05-24.026607.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-12-16T15-05-24.026607.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T15-05-24.026607.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T15-05-24.026607.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T15-05-24.026607.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T15-05-24.026607.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-12-16T15-05-24.026607.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-12-16T15-05-24.026607.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T15-05-24.026607.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-12-16T15-05-24.026607.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T15-05-24.026607.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T15-05-24.026607.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T15-05-24.026607.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-12-16T15-05-24.026607.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T15-05-24.026607.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T15-05-24.026607.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T15-05-24.026607.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T15-05-24.026607.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T15-05-24.026607.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T15-05-24.026607.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T15-05-24.026607.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T15-05-24.026607.parquet", 
"**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T15-05-24.026607.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T15-05-24.026607.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T15-05-24.026607.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T15-05-24.026607.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T15-05-24.026607.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T15-05-24.026607.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-12-16T15-05-24.026607.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T15-05-24.026607.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-12-16T15-05-24.026607.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T15-05-24.026607.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T15-05-24.026607.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T15-05-24.026607.parquet", "**/details_harness|hendrycksTest-management|5_2023-12-16T15-05-24.026607.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-12-16T15-05-24.026607.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T15-05-24.026607.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T15-05-24.026607.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T15-05-24.026607.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T15-05-24.026607.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-12-16T15-05-24.026607.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-12-16T15-05-24.026607.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-12-16T15-05-24.026607.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T15-05-24.026607.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-12-16T15-05-24.026607.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T15-05-24.026607.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T15-05-24.026607.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-12-16T15-05-24.026607.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-12-16T15-05-24.026607.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-12-16T15-05-24.026607.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T15-05-24.026607.parquet", "**/details_harness|hendrycksTest-virology|5_2023-12-16T15-05-24.026607.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-12-16T15-05-24.026607.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T15-05-24.026607.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-12-16T15-05-24.026607.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-12-16T15-05-24.026607.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T15-05-24.026607.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T15-05-24.026607.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-12-16T15-05-24.026607.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T15-05-24.026607.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T15-05-24.026607.parquet", 
"**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T15-05-24.026607.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T15-05-24.026607.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-12-16T15-05-24.026607.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-12-16T15-05-24.026607.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T15-05-24.026607.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-12-16T15-05-24.026607.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T15-05-24.026607.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T15-05-24.026607.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T15-05-24.026607.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-12-16T15-05-24.026607.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T15-05-24.026607.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T15-05-24.026607.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T15-05-24.026607.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T15-05-24.026607.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T15-05-24.026607.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T15-05-24.026607.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T15-05-24.026607.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T15-05-24.026607.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T15-05-24.026607.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T15-05-24.026607.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T15-05-24.026607.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T15-05-24.026607.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T15-05-24.026607.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T15-05-24.026607.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-12-16T15-05-24.026607.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T15-05-24.026607.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-12-16T15-05-24.026607.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T15-05-24.026607.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T15-05-24.026607.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T15-05-24.026607.parquet", "**/details_harness|hendrycksTest-management|5_2023-12-16T15-05-24.026607.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-12-16T15-05-24.026607.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T15-05-24.026607.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T15-05-24.026607.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T15-05-24.026607.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T15-05-24.026607.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-12-16T15-05-24.026607.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-12-16T15-05-24.026607.parquet", 
"**/details_harness|hendrycksTest-prehistory|5_2023-12-16T15-05-24.026607.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T15-05-24.026607.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-12-16T15-05-24.026607.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T15-05-24.026607.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T15-05-24.026607.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-12-16T15-05-24.026607.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-12-16T15-05-24.026607.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-12-16T15-05-24.026607.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T15-05-24.026607.parquet", "**/details_harness|hendrycksTest-virology|5_2023-12-16T15-05-24.026607.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-12-16T15-05-24.026607.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_12_16T15_05_24.026607", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T15-05-24.026607.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T15-05-24.026607.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_12_16T15_05_24.026607", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-12-16T15-05-24.026607.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-12-16T15-05-24.026607.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_12_16T15_05_24.026607", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-12-16T15-05-24.026607.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-12-16T15-05-24.026607.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_12_16T15_05_24.026607", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T15-05-24.026607.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T15-05-24.026607.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_12_16T15_05_24.026607", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T15-05-24.026607.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T15-05-24.026607.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_12_16T15_05_24.026607", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-12-16T15-05-24.026607.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-12-16T15-05-24.026607.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_12_16T15_05_24.026607", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T15-05-24.026607.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T15-05-24.026607.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_12_16T15_05_24.026607", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T15-05-24.026607.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T15-05-24.026607.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_12_16T15_05_24.026607", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T15-05-24.026607.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T15-05-24.026607.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_12_16T15_05_24.026607", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T15-05-24.026607.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T15-05-24.026607.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_12_16T15_05_24.026607", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-12-16T15-05-24.026607.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-12-16T15-05-24.026607.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_12_16T15_05_24.026607", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-12-16T15-05-24.026607.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-12-16T15-05-24.026607.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_12_16T15_05_24.026607", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T15-05-24.026607.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T15-05-24.026607.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_12_16T15_05_24.026607", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-12-16T15-05-24.026607.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-12-16T15-05-24.026607.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_12_16T15_05_24.026607", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T15-05-24.026607.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T15-05-24.026607.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_12_16T15_05_24.026607", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T15-05-24.026607.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T15-05-24.026607.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_12_16T15_05_24.026607", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T15-05-24.026607.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T15-05-24.026607.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_12_16T15_05_24.026607", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-12-16T15-05-24.026607.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-12-16T15-05-24.026607.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_12_16T15_05_24.026607", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T15-05-24.026607.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T15-05-24.026607.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_12_16T15_05_24.026607", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T15-05-24.026607.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T15-05-24.026607.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_12_16T15_05_24.026607", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T15-05-24.026607.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T15-05-24.026607.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_12_16T15_05_24.026607", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T15-05-24.026607.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T15-05-24.026607.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_12_16T15_05_24.026607", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T15-05-24.026607.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T15-05-24.026607.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_12_16T15_05_24.026607", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T15-05-24.026607.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T15-05-24.026607.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_12_16T15_05_24.026607", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T15-05-24.026607.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T15-05-24.026607.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_12_16T15_05_24.026607", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T15-05-24.026607.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T15-05-24.026607.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_12_16T15_05_24.026607", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T15-05-24.026607.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T15-05-24.026607.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_12_16T15_05_24.026607", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T15-05-24.026607.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T15-05-24.026607.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_12_16T15_05_24.026607", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T15-05-24.026607.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T15-05-24.026607.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_12_16T15_05_24.026607", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T15-05-24.026607.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T15-05-24.026607.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_12_16T15_05_24.026607", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T15-05-24.026607.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T15-05-24.026607.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_12_16T15_05_24.026607", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T15-05-24.026607.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T15-05-24.026607.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_12_16T15_05_24.026607", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-12-16T15-05-24.026607.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-12-16T15-05-24.026607.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_12_16T15_05_24.026607", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T15-05-24.026607.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T15-05-24.026607.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_12_16T15_05_24.026607", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-12-16T15-05-24.026607.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-12-16T15-05-24.026607.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_12_16T15_05_24.026607", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T15-05-24.026607.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T15-05-24.026607.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_12_16T15_05_24.026607", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T15-05-24.026607.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T15-05-24.026607.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_12_16T15_05_24.026607", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T15-05-24.026607.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T15-05-24.026607.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_12_16T15_05_24.026607", "path": ["**/details_harness|hendrycksTest-management|5_2023-12-16T15-05-24.026607.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-12-16T15-05-24.026607.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_12_16T15_05_24.026607", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-12-16T15-05-24.026607.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-12-16T15-05-24.026607.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_12_16T15_05_24.026607", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T15-05-24.026607.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T15-05-24.026607.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_12_16T15_05_24.026607", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T15-05-24.026607.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T15-05-24.026607.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_12_16T15_05_24.026607", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T15-05-24.026607.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T15-05-24.026607.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_12_16T15_05_24.026607", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T15-05-24.026607.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T15-05-24.026607.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_12_16T15_05_24.026607", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-12-16T15-05-24.026607.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-12-16T15-05-24.026607.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_12_16T15_05_24.026607", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-12-16T15-05-24.026607.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-12-16T15-05-24.026607.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_12_16T15_05_24.026607", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-12-16T15-05-24.026607.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-12-16T15-05-24.026607.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_12_16T15_05_24.026607", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T15-05-24.026607.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T15-05-24.026607.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_12_16T15_05_24.026607", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-12-16T15-05-24.026607.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2023-12-16T15-05-24.026607.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_12_16T15_05_24.026607", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T15-05-24.026607.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T15-05-24.026607.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_12_16T15_05_24.026607", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T15-05-24.026607.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T15-05-24.026607.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_12_16T15_05_24.026607", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-12-16T15-05-24.026607.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-12-16T15-05-24.026607.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_12_16T15_05_24.026607", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-12-16T15-05-24.026607.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-12-16T15-05-24.026607.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_12_16T15_05_24.026607", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-12-16T15-05-24.026607.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-12-16T15-05-24.026607.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_12_16T15_05_24.026607", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T15-05-24.026607.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T15-05-24.026607.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_12_16T15_05_24.026607", "path": ["**/details_harness|hendrycksTest-virology|5_2023-12-16T15-05-24.026607.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-12-16T15-05-24.026607.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_12_16T15_05_24.026607", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-12-16T15-05-24.026607.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-12-16T15-05-24.026607.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_12_16T15_05_24.026607", "path": ["**/details_harness|truthfulqa:mc|0_2023-12-16T15-05-24.026607.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-12-16T15-05-24.026607.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_12_16T15_05_24.026607", "path": ["**/details_harness|winogrande|5_2023-12-16T15-05-24.026607.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-12-16T15-05-24.026607.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_12_16T15_05_24.026607", "path": ["results_2023-12-16T15-05-24.026607.parquet"]}, {"split": "latest", "path": 
["results_2023-12-16T15-05-24.026607.parquet"]}]}]}
2023-12-16T15:09:02+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of Toten5/LeoScorpius-GreenNode-7B-v1 Dataset automatically created during the evaluation run of model Toten5/LeoScorpius-GreenNode-7B-v1 on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-12-16T15:05:24.026607 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
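The loading snippet referenced just above ("To load the details from a run, you can for instance do the following:") does not appear in the flattened text; based on the dataset summary earlier in this record, it is presumably:

```python
from datasets import load_dataset

# The "train" split of each config always points at the latest results.
data = load_dataset(
    "open-llm-leaderboard/details_Toten5__LeoScorpius-GreenNode-7B-v1",
    "harness_winogrande_5",
    split="train",
)
```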
[ "# Dataset Card for Evaluation run of Toten5/LeoScorpius-GreenNode-7B-v1\n\n\n\nDataset automatically created during the evaluation run of model Toten5/LeoScorpius-GreenNode-7B-v1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-12-16T15:05:24.026607(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of Toten5/LeoScorpius-GreenNode-7B-v1\n\n\n\nDataset automatically created during the evaluation run of model Toten5/LeoScorpius-GreenNode-7B-v1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-12-16T15:05:24.026607(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 197, 67, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of Toten5/LeoScorpius-GreenNode-7B-v1\n\n\n\nDataset automatically created during the evaluation run of model Toten5/LeoScorpius-GreenNode-7B-v1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-12-16T15:05:24.026607(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]" ]
65a1b794ed1045c18390322d125a8561ddb7674f
# Dataset Card for Evaluation run of GreenNode/GreenNodeLM-7B-v2leo <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [GreenNode/GreenNodeLM-7B-v2leo](https://huggingface.co/GreenNode/GreenNodeLM-7B-v2leo) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_GreenNode__GreenNodeLM-7B-v2leo", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-12-16T15:11:27.236820](https://huggingface.co/datasets/open-llm-leaderboard/details_GreenNode__GreenNodeLM-7B-v2leo/blob/main/results_2023-12-16T15-11-27.236820.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.6544526838897918, "acc_stderr": 0.032061359095590616, "acc_norm": 0.6547329571965785, "acc_norm_stderr": 0.03271846530737067, "mc1": 0.5397796817625459, "mc1_stderr": 0.017448017223960874, "mc2": 0.6783429669391698, "mc2_stderr": 0.015130110237542948 }, "harness|arc:challenge|25": { "acc": 0.6697952218430034, "acc_stderr": 0.013743085603760427, "acc_norm": 0.6979522184300341, "acc_norm_stderr": 0.013417519144716413 }, "harness|hellaswag|10": { "acc": 0.7097191794463255, "acc_stderr": 0.004529642828546397, "acc_norm": 0.8802031467835093, "acc_norm_stderr": 0.0032406018831804884 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.34, "acc_stderr": 0.047609522856952365, "acc_norm": 0.34, "acc_norm_stderr": 0.047609522856952365 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.6666666666666666, "acc_stderr": 0.04072314811876837, "acc_norm": 0.6666666666666666, "acc_norm_stderr": 0.04072314811876837 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.6842105263157895, "acc_stderr": 0.0378272898086547, "acc_norm": 0.6842105263157895, "acc_norm_stderr": 0.0378272898086547 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.62, "acc_stderr": 0.048783173121456316, "acc_norm": 0.62, "acc_norm_stderr": 0.048783173121456316 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.720754716981132, "acc_stderr": 0.027611163402399715, "acc_norm": 0.720754716981132, "acc_norm_stderr": 0.027611163402399715 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.7430555555555556, "acc_stderr": 0.03653946969442099, "acc_norm": 0.7430555555555556, "acc_norm_stderr": 0.03653946969442099 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.47, "acc_stderr": 0.050161355804659205, "acc_norm": 0.47, "acc_norm_stderr": 0.050161355804659205 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.54, "acc_stderr": 0.05009082659620333, "acc_norm": 0.54, "acc_norm_stderr": 0.05009082659620333 }, 
"harness|hendrycksTest-college_mathematics|5": { "acc": 0.29, "acc_stderr": 0.04560480215720683, "acc_norm": 0.29, "acc_norm_stderr": 0.04560480215720683 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.6416184971098265, "acc_stderr": 0.036563436533531585, "acc_norm": 0.6416184971098265, "acc_norm_stderr": 0.036563436533531585 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.4019607843137255, "acc_stderr": 0.04878608714466996, "acc_norm": 0.4019607843137255, "acc_norm_stderr": 0.04878608714466996 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.79, "acc_stderr": 0.04093601807403326, "acc_norm": 0.79, "acc_norm_stderr": 0.04093601807403326 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.5659574468085107, "acc_stderr": 0.03240038086792747, "acc_norm": 0.5659574468085107, "acc_norm_stderr": 0.03240038086792747 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.47368421052631576, "acc_stderr": 0.046970851366478626, "acc_norm": 0.47368421052631576, "acc_norm_stderr": 0.046970851366478626 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.6068965517241379, "acc_stderr": 0.0407032901370707, "acc_norm": 0.6068965517241379, "acc_norm_stderr": 0.0407032901370707 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.4470899470899471, "acc_stderr": 0.02560672399577702, "acc_norm": 0.4470899470899471, "acc_norm_stderr": 0.02560672399577702 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.4523809523809524, "acc_stderr": 0.044518079590553275, "acc_norm": 0.4523809523809524, "acc_norm_stderr": 0.044518079590553275 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.4, "acc_stderr": 0.04923659639173309, "acc_norm": 0.4, "acc_norm_stderr": 0.04923659639173309 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.7774193548387097, "acc_stderr": 0.023664216671642518, "acc_norm": 0.7774193548387097, "acc_norm_stderr": 0.023664216671642518 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.5073891625615764, "acc_stderr": 0.035176035403610105, "acc_norm": 0.5073891625615764, "acc_norm_stderr": 0.035176035403610105 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.7, "acc_stderr": 0.046056618647183814, "acc_norm": 0.7, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.7636363636363637, "acc_stderr": 0.03317505930009182, "acc_norm": 0.7636363636363637, "acc_norm_stderr": 0.03317505930009182 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.7878787878787878, "acc_stderr": 0.029126522834586818, "acc_norm": 0.7878787878787878, "acc_norm_stderr": 0.029126522834586818 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.8808290155440415, "acc_stderr": 0.023381935348121427, "acc_norm": 0.8808290155440415, "acc_norm_stderr": 0.023381935348121427 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.6666666666666666, "acc_stderr": 0.023901157979402534, "acc_norm": 0.6666666666666666, "acc_norm_stderr": 0.023901157979402534 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.37407407407407406, "acc_stderr": 0.029502861128955293, "acc_norm": 0.37407407407407406, "acc_norm_stderr": 0.029502861128955293 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.7016806722689075, "acc_stderr": 0.029719142876342853, "acc_norm": 0.7016806722689075, "acc_norm_stderr": 0.029719142876342853 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.33774834437086093, "acc_stderr": 
0.03861557546255169, "acc_norm": 0.33774834437086093, "acc_norm_stderr": 0.03861557546255169 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8366972477064221, "acc_stderr": 0.01584825580650155, "acc_norm": 0.8366972477064221, "acc_norm_stderr": 0.01584825580650155 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.5231481481481481, "acc_stderr": 0.03406315360711507, "acc_norm": 0.5231481481481481, "acc_norm_stderr": 0.03406315360711507 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.8186274509803921, "acc_stderr": 0.027044621719474082, "acc_norm": 0.8186274509803921, "acc_norm_stderr": 0.027044621719474082 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.8016877637130801, "acc_stderr": 0.025955020841621112, "acc_norm": 0.8016877637130801, "acc_norm_stderr": 0.025955020841621112 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.6816143497757847, "acc_stderr": 0.03126580522513713, "acc_norm": 0.6816143497757847, "acc_norm_stderr": 0.03126580522513713 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.7938931297709924, "acc_stderr": 0.03547771004159465, "acc_norm": 0.7938931297709924, "acc_norm_stderr": 0.03547771004159465 }, "harness|hendrycksTest-international_law|5": { "acc": 0.8016528925619835, "acc_stderr": 0.03640118271990946, "acc_norm": 0.8016528925619835, "acc_norm_stderr": 0.03640118271990946 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.7962962962962963, "acc_stderr": 0.03893542518824847, "acc_norm": 0.7962962962962963, "acc_norm_stderr": 0.03893542518824847 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.7791411042944786, "acc_stderr": 0.03259177392742178, "acc_norm": 0.7791411042944786, "acc_norm_stderr": 0.03259177392742178 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.49107142857142855, "acc_stderr": 0.04745033255489123, "acc_norm": 0.49107142857142855, "acc_norm_stderr": 0.04745033255489123 }, "harness|hendrycksTest-management|5": { "acc": 0.7864077669902912, "acc_stderr": 0.040580420156460344, "acc_norm": 0.7864077669902912, "acc_norm_stderr": 0.040580420156460344 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8717948717948718, "acc_stderr": 0.02190190511507333, "acc_norm": 0.8717948717948718, "acc_norm_stderr": 0.02190190511507333 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.72, "acc_stderr": 0.045126085985421276, "acc_norm": 0.72, "acc_norm_stderr": 0.045126085985421276 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.8173690932311622, "acc_stderr": 0.013816335389973136, "acc_norm": 0.8173690932311622, "acc_norm_stderr": 0.013816335389973136 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.7427745664739884, "acc_stderr": 0.023532925431044287, "acc_norm": 0.7427745664739884, "acc_norm_stderr": 0.023532925431044287 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.4782122905027933, "acc_stderr": 0.016706617522176136, "acc_norm": 0.4782122905027933, "acc_norm_stderr": 0.016706617522176136 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.7124183006535948, "acc_stderr": 0.025917806117147158, "acc_norm": 0.7124183006535948, "acc_norm_stderr": 0.025917806117147158 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.7106109324758842, "acc_stderr": 0.025755865922632945, "acc_norm": 0.7106109324758842, "acc_norm_stderr": 0.025755865922632945 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.7561728395061729, "acc_stderr": 0.023891879541959614, "acc_norm": 0.7561728395061729, "acc_norm_stderr": 0.023891879541959614 }, 
"harness|hendrycksTest-professional_accounting|5": { "acc": 0.4716312056737589, "acc_stderr": 0.02977945095730307, "acc_norm": 0.4716312056737589, "acc_norm_stderr": 0.02977945095730307 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.46740547588005216, "acc_stderr": 0.01274307294265335, "acc_norm": 0.46740547588005216, "acc_norm_stderr": 0.01274307294265335 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.6691176470588235, "acc_stderr": 0.028582709753898445, "acc_norm": 0.6691176470588235, "acc_norm_stderr": 0.028582709753898445 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.6666666666666666, "acc_stderr": 0.0190709855896875, "acc_norm": 0.6666666666666666, "acc_norm_stderr": 0.0190709855896875 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6818181818181818, "acc_stderr": 0.04461272175910509, "acc_norm": 0.6818181818181818, "acc_norm_stderr": 0.04461272175910509 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.7183673469387755, "acc_stderr": 0.028795185574291296, "acc_norm": 0.7183673469387755, "acc_norm_stderr": 0.028795185574291296 }, "harness|hendrycksTest-sociology|5": { "acc": 0.8407960199004975, "acc_stderr": 0.02587064676616913, "acc_norm": 0.8407960199004975, "acc_norm_stderr": 0.02587064676616913 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.88, "acc_stderr": 0.03265986323710906, "acc_norm": 0.88, "acc_norm_stderr": 0.03265986323710906 }, "harness|hendrycksTest-virology|5": { "acc": 0.536144578313253, "acc_stderr": 0.038823108508905954, "acc_norm": 0.536144578313253, "acc_norm_stderr": 0.038823108508905954 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.847953216374269, "acc_stderr": 0.027539122889061456, "acc_norm": 0.847953216374269, "acc_norm_stderr": 0.027539122889061456 }, "harness|truthfulqa:mc|0": { "mc1": 0.5397796817625459, "mc1_stderr": 0.017448017223960874, "mc2": 0.6783429669391698, "mc2_stderr": 0.015130110237542948 }, "harness|winogrande|5": { "acc": 0.8200473559589582, "acc_stderr": 0.01079646868806868 }, "harness|gsm8k|5": { "acc": 0.6709628506444276, "acc_stderr": 0.01294237560367937 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. 
news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
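As a small complement to the loading instructions above, the snippet below is a minimal sketch of how one might pull both the aggregated "results" config and a single per-task config from this dataset. It assumes the `datasets` library is installed and uses only the config and split names listed in this card; the exact column layout inside each split is whatever the evaluation harness wrote, so the inspection step is illustrative rather than authoritative.

```python
from datasets import load_dataset

REPO = "open-llm-leaderboard/details_GreenNode__GreenNodeLM-7B-v2leo"

# Aggregated metrics for the run; the "latest" split always points at the
# most recent results (here, the 2023-12-16T15:11:27 run).
results = load_dataset(REPO, "results", split="latest")

# Per-sample details for one task, e.g. the 5-shot GSM8K harness config.
gsm8k_details = load_dataset(REPO, "harness_gsm8k_5", split="latest")

# Convert to pandas for quick inspection; check the columns before relying
# on any specific field name.
print(results.to_pandas().head())
print(gsm8k_details.to_pandas().columns)
```

The same pattern applies to any of the per-task configs listed in the metadata below: swap in the config name (for example `harness_hendrycksTest_world_religions_5`) and either the `latest` split or the timestamped split for a specific run.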
open-llm-leaderboard/details_GreenNode__GreenNodeLM-7B-v2leo
[ "region:us" ]
2023-12-16T15:14:17+00:00
{"pretty_name": "Evaluation run of GreenNode/GreenNodeLM-7B-v2leo", "dataset_summary": "Dataset automatically created during the evaluation run of model [GreenNode/GreenNodeLM-7B-v2leo](https://huggingface.co/GreenNode/GreenNodeLM-7B-v2leo) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_GreenNode__GreenNodeLM-7B-v2leo\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-12-16T15:11:27.236820](https://huggingface.co/datasets/open-llm-leaderboard/details_GreenNode__GreenNodeLM-7B-v2leo/blob/main/results_2023-12-16T15-11-27.236820.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6544526838897918,\n \"acc_stderr\": 0.032061359095590616,\n \"acc_norm\": 0.6547329571965785,\n \"acc_norm_stderr\": 0.03271846530737067,\n \"mc1\": 0.5397796817625459,\n \"mc1_stderr\": 0.017448017223960874,\n \"mc2\": 0.6783429669391698,\n \"mc2_stderr\": 0.015130110237542948\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.6697952218430034,\n \"acc_stderr\": 0.013743085603760427,\n \"acc_norm\": 0.6979522184300341,\n \"acc_norm_stderr\": 0.013417519144716413\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.7097191794463255,\n \"acc_stderr\": 0.004529642828546397,\n \"acc_norm\": 0.8802031467835093,\n \"acc_norm_stderr\": 0.0032406018831804884\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.34,\n \"acc_stderr\": 0.047609522856952365,\n \"acc_norm\": 0.34,\n \"acc_norm_stderr\": 0.047609522856952365\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6666666666666666,\n \"acc_stderr\": 0.04072314811876837,\n \"acc_norm\": 0.6666666666666666,\n \"acc_norm_stderr\": 0.04072314811876837\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.6842105263157895,\n \"acc_stderr\": 0.0378272898086547,\n \"acc_norm\": 0.6842105263157895,\n \"acc_norm_stderr\": 0.0378272898086547\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.62,\n \"acc_stderr\": 0.048783173121456316,\n \"acc_norm\": 0.62,\n \"acc_norm_stderr\": 0.048783173121456316\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.720754716981132,\n \"acc_stderr\": 0.027611163402399715,\n \"acc_norm\": 0.720754716981132,\n \"acc_norm_stderr\": 0.027611163402399715\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7430555555555556,\n \"acc_stderr\": 0.03653946969442099,\n \"acc_norm\": 0.7430555555555556,\n \"acc_norm_stderr\": 0.03653946969442099\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.47,\n \"acc_stderr\": 0.050161355804659205,\n 
\"acc_norm\": 0.47,\n \"acc_norm_stderr\": 0.050161355804659205\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.54,\n \"acc_stderr\": 0.05009082659620333,\n \"acc_norm\": 0.54,\n \"acc_norm_stderr\": 0.05009082659620333\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.29,\n \"acc_stderr\": 0.04560480215720683,\n \"acc_norm\": 0.29,\n \"acc_norm_stderr\": 0.04560480215720683\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6416184971098265,\n \"acc_stderr\": 0.036563436533531585,\n \"acc_norm\": 0.6416184971098265,\n \"acc_norm_stderr\": 0.036563436533531585\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.4019607843137255,\n \"acc_stderr\": 0.04878608714466996,\n \"acc_norm\": 0.4019607843137255,\n \"acc_norm_stderr\": 0.04878608714466996\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.79,\n \"acc_stderr\": 0.04093601807403326,\n \"acc_norm\": 0.79,\n \"acc_norm_stderr\": 0.04093601807403326\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.5659574468085107,\n \"acc_stderr\": 0.03240038086792747,\n \"acc_norm\": 0.5659574468085107,\n \"acc_norm_stderr\": 0.03240038086792747\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.47368421052631576,\n \"acc_stderr\": 0.046970851366478626,\n \"acc_norm\": 0.47368421052631576,\n \"acc_norm_stderr\": 0.046970851366478626\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.6068965517241379,\n \"acc_stderr\": 0.0407032901370707,\n \"acc_norm\": 0.6068965517241379,\n \"acc_norm_stderr\": 0.0407032901370707\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.4470899470899471,\n \"acc_stderr\": 0.02560672399577702,\n \"acc_norm\": 0.4470899470899471,\n \"acc_norm_stderr\": 0.02560672399577702\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.4523809523809524,\n \"acc_stderr\": 0.044518079590553275,\n \"acc_norm\": 0.4523809523809524,\n \"acc_norm_stderr\": 0.044518079590553275\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.4,\n \"acc_stderr\": 0.04923659639173309,\n \"acc_norm\": 0.4,\n \"acc_norm_stderr\": 0.04923659639173309\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7774193548387097,\n \"acc_stderr\": 0.023664216671642518,\n \"acc_norm\": 0.7774193548387097,\n \"acc_norm_stderr\": 0.023664216671642518\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.5073891625615764,\n \"acc_stderr\": 0.035176035403610105,\n \"acc_norm\": 0.5073891625615764,\n \"acc_norm_stderr\": 0.035176035403610105\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.7,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.7,\n \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7636363636363637,\n \"acc_stderr\": 0.03317505930009182,\n \"acc_norm\": 0.7636363636363637,\n \"acc_norm_stderr\": 0.03317505930009182\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.7878787878787878,\n \"acc_stderr\": 0.029126522834586818,\n \"acc_norm\": 0.7878787878787878,\n \"acc_norm_stderr\": 0.029126522834586818\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.8808290155440415,\n \"acc_stderr\": 0.023381935348121427,\n \"acc_norm\": 0.8808290155440415,\n \"acc_norm_stderr\": 0.023381935348121427\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 
0.6666666666666666,\n \"acc_stderr\": 0.023901157979402534,\n \"acc_norm\": 0.6666666666666666,\n \"acc_norm_stderr\": 0.023901157979402534\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.37407407407407406,\n \"acc_stderr\": 0.029502861128955293,\n \"acc_norm\": 0.37407407407407406,\n \"acc_norm_stderr\": 0.029502861128955293\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.7016806722689075,\n \"acc_stderr\": 0.029719142876342853,\n \"acc_norm\": 0.7016806722689075,\n \"acc_norm_stderr\": 0.029719142876342853\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.33774834437086093,\n \"acc_stderr\": 0.03861557546255169,\n \"acc_norm\": 0.33774834437086093,\n \"acc_norm_stderr\": 0.03861557546255169\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8366972477064221,\n \"acc_stderr\": 0.01584825580650155,\n \"acc_norm\": 0.8366972477064221,\n \"acc_norm_stderr\": 0.01584825580650155\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.5231481481481481,\n \"acc_stderr\": 0.03406315360711507,\n \"acc_norm\": 0.5231481481481481,\n \"acc_norm_stderr\": 0.03406315360711507\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.8186274509803921,\n \"acc_stderr\": 0.027044621719474082,\n \"acc_norm\": 0.8186274509803921,\n \"acc_norm_stderr\": 0.027044621719474082\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.8016877637130801,\n \"acc_stderr\": 0.025955020841621112,\n \"acc_norm\": 0.8016877637130801,\n \"acc_norm_stderr\": 0.025955020841621112\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6816143497757847,\n \"acc_stderr\": 0.03126580522513713,\n \"acc_norm\": 0.6816143497757847,\n \"acc_norm_stderr\": 0.03126580522513713\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.7938931297709924,\n \"acc_stderr\": 0.03547771004159465,\n \"acc_norm\": 0.7938931297709924,\n \"acc_norm_stderr\": 0.03547771004159465\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.8016528925619835,\n \"acc_stderr\": 0.03640118271990946,\n \"acc_norm\": 0.8016528925619835,\n \"acc_norm_stderr\": 0.03640118271990946\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7962962962962963,\n \"acc_stderr\": 0.03893542518824847,\n \"acc_norm\": 0.7962962962962963,\n \"acc_norm_stderr\": 0.03893542518824847\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7791411042944786,\n \"acc_stderr\": 0.03259177392742178,\n \"acc_norm\": 0.7791411042944786,\n \"acc_norm_stderr\": 0.03259177392742178\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.49107142857142855,\n \"acc_stderr\": 0.04745033255489123,\n \"acc_norm\": 0.49107142857142855,\n \"acc_norm_stderr\": 0.04745033255489123\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7864077669902912,\n \"acc_stderr\": 0.040580420156460344,\n \"acc_norm\": 0.7864077669902912,\n \"acc_norm_stderr\": 0.040580420156460344\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8717948717948718,\n \"acc_stderr\": 0.02190190511507333,\n \"acc_norm\": 0.8717948717948718,\n \"acc_norm_stderr\": 0.02190190511507333\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.72,\n \"acc_stderr\": 0.045126085985421276,\n \"acc_norm\": 0.72,\n \"acc_norm_stderr\": 0.045126085985421276\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.8173690932311622,\n \"acc_stderr\": 0.013816335389973136,\n 
\"acc_norm\": 0.8173690932311622,\n \"acc_norm_stderr\": 0.013816335389973136\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.7427745664739884,\n \"acc_stderr\": 0.023532925431044287,\n \"acc_norm\": 0.7427745664739884,\n \"acc_norm_stderr\": 0.023532925431044287\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.4782122905027933,\n \"acc_stderr\": 0.016706617522176136,\n \"acc_norm\": 0.4782122905027933,\n \"acc_norm_stderr\": 0.016706617522176136\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.7124183006535948,\n \"acc_stderr\": 0.025917806117147158,\n \"acc_norm\": 0.7124183006535948,\n \"acc_norm_stderr\": 0.025917806117147158\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.7106109324758842,\n \"acc_stderr\": 0.025755865922632945,\n \"acc_norm\": 0.7106109324758842,\n \"acc_norm_stderr\": 0.025755865922632945\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.7561728395061729,\n \"acc_stderr\": 0.023891879541959614,\n \"acc_norm\": 0.7561728395061729,\n \"acc_norm_stderr\": 0.023891879541959614\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.4716312056737589,\n \"acc_stderr\": 0.02977945095730307,\n \"acc_norm\": 0.4716312056737589,\n \"acc_norm_stderr\": 0.02977945095730307\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.46740547588005216,\n \"acc_stderr\": 0.01274307294265335,\n \"acc_norm\": 0.46740547588005216,\n \"acc_norm_stderr\": 0.01274307294265335\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.6691176470588235,\n \"acc_stderr\": 0.028582709753898445,\n \"acc_norm\": 0.6691176470588235,\n \"acc_norm_stderr\": 0.028582709753898445\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6666666666666666,\n \"acc_stderr\": 0.0190709855896875,\n \"acc_norm\": 0.6666666666666666,\n \"acc_norm_stderr\": 0.0190709855896875\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6818181818181818,\n \"acc_stderr\": 0.04461272175910509,\n \"acc_norm\": 0.6818181818181818,\n \"acc_norm_stderr\": 0.04461272175910509\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.7183673469387755,\n \"acc_stderr\": 0.028795185574291296,\n \"acc_norm\": 0.7183673469387755,\n \"acc_norm_stderr\": 0.028795185574291296\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8407960199004975,\n \"acc_stderr\": 0.02587064676616913,\n \"acc_norm\": 0.8407960199004975,\n \"acc_norm_stderr\": 0.02587064676616913\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.88,\n \"acc_stderr\": 0.03265986323710906,\n \"acc_norm\": 0.88,\n \"acc_norm_stderr\": 0.03265986323710906\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.536144578313253,\n \"acc_stderr\": 0.038823108508905954,\n \"acc_norm\": 0.536144578313253,\n \"acc_norm_stderr\": 0.038823108508905954\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.847953216374269,\n \"acc_stderr\": 0.027539122889061456,\n \"acc_norm\": 0.847953216374269,\n \"acc_norm_stderr\": 0.027539122889061456\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.5397796817625459,\n \"mc1_stderr\": 0.017448017223960874,\n \"mc2\": 0.6783429669391698,\n \"mc2_stderr\": 0.015130110237542948\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.8200473559589582,\n \"acc_stderr\": 0.01079646868806868\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.6709628506444276,\n \"acc_stderr\": 0.01294237560367937\n }\n}\n```", "repo_url": 
"https://huggingface.co/GreenNode/GreenNodeLM-7B-v2leo", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_12_16T15_11_27.236820", "path": ["**/details_harness|arc:challenge|25_2023-12-16T15-11-27.236820.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-12-16T15-11-27.236820.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_12_16T15_11_27.236820", "path": ["**/details_harness|gsm8k|5_2023-12-16T15-11-27.236820.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-12-16T15-11-27.236820.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_12_16T15_11_27.236820", "path": ["**/details_harness|hellaswag|10_2023-12-16T15-11-27.236820.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-12-16T15-11-27.236820.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_12_16T15_11_27.236820", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T15-11-27.236820.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-12-16T15-11-27.236820.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-12-16T15-11-27.236820.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T15-11-27.236820.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T15-11-27.236820.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-12-16T15-11-27.236820.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T15-11-27.236820.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T15-11-27.236820.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T15-11-27.236820.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T15-11-27.236820.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-12-16T15-11-27.236820.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-12-16T15-11-27.236820.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T15-11-27.236820.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-12-16T15-11-27.236820.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T15-11-27.236820.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T15-11-27.236820.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T15-11-27.236820.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-12-16T15-11-27.236820.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T15-11-27.236820.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T15-11-27.236820.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T15-11-27.236820.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T15-11-27.236820.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T15-11-27.236820.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T15-11-27.236820.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T15-11-27.236820.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T15-11-27.236820.parquet", 
"**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T15-11-27.236820.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T15-11-27.236820.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T15-11-27.236820.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T15-11-27.236820.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T15-11-27.236820.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T15-11-27.236820.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-12-16T15-11-27.236820.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T15-11-27.236820.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-12-16T15-11-27.236820.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T15-11-27.236820.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T15-11-27.236820.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T15-11-27.236820.parquet", "**/details_harness|hendrycksTest-management|5_2023-12-16T15-11-27.236820.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-12-16T15-11-27.236820.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T15-11-27.236820.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T15-11-27.236820.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T15-11-27.236820.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T15-11-27.236820.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-12-16T15-11-27.236820.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-12-16T15-11-27.236820.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-12-16T15-11-27.236820.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T15-11-27.236820.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-12-16T15-11-27.236820.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T15-11-27.236820.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T15-11-27.236820.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-12-16T15-11-27.236820.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-12-16T15-11-27.236820.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-12-16T15-11-27.236820.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T15-11-27.236820.parquet", "**/details_harness|hendrycksTest-virology|5_2023-12-16T15-11-27.236820.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-12-16T15-11-27.236820.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T15-11-27.236820.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-12-16T15-11-27.236820.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-12-16T15-11-27.236820.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T15-11-27.236820.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T15-11-27.236820.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-12-16T15-11-27.236820.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T15-11-27.236820.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T15-11-27.236820.parquet", 
"**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T15-11-27.236820.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T15-11-27.236820.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-12-16T15-11-27.236820.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-12-16T15-11-27.236820.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T15-11-27.236820.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-12-16T15-11-27.236820.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T15-11-27.236820.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T15-11-27.236820.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T15-11-27.236820.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-12-16T15-11-27.236820.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T15-11-27.236820.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T15-11-27.236820.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T15-11-27.236820.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T15-11-27.236820.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T15-11-27.236820.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T15-11-27.236820.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T15-11-27.236820.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T15-11-27.236820.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T15-11-27.236820.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T15-11-27.236820.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T15-11-27.236820.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T15-11-27.236820.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T15-11-27.236820.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T15-11-27.236820.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-12-16T15-11-27.236820.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T15-11-27.236820.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-12-16T15-11-27.236820.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T15-11-27.236820.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T15-11-27.236820.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T15-11-27.236820.parquet", "**/details_harness|hendrycksTest-management|5_2023-12-16T15-11-27.236820.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-12-16T15-11-27.236820.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T15-11-27.236820.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T15-11-27.236820.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T15-11-27.236820.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T15-11-27.236820.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-12-16T15-11-27.236820.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-12-16T15-11-27.236820.parquet", 
"**/details_harness|hendrycksTest-prehistory|5_2023-12-16T15-11-27.236820.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T15-11-27.236820.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-12-16T15-11-27.236820.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T15-11-27.236820.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T15-11-27.236820.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-12-16T15-11-27.236820.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-12-16T15-11-27.236820.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-12-16T15-11-27.236820.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T15-11-27.236820.parquet", "**/details_harness|hendrycksTest-virology|5_2023-12-16T15-11-27.236820.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-12-16T15-11-27.236820.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_12_16T15_11_27.236820", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T15-11-27.236820.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T15-11-27.236820.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_12_16T15_11_27.236820", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-12-16T15-11-27.236820.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-12-16T15-11-27.236820.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_12_16T15_11_27.236820", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-12-16T15-11-27.236820.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-12-16T15-11-27.236820.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_12_16T15_11_27.236820", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T15-11-27.236820.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T15-11-27.236820.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_12_16T15_11_27.236820", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T15-11-27.236820.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T15-11-27.236820.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_12_16T15_11_27.236820", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-12-16T15-11-27.236820.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-12-16T15-11-27.236820.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_12_16T15_11_27.236820", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T15-11-27.236820.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T15-11-27.236820.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_12_16T15_11_27.236820", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T15-11-27.236820.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T15-11-27.236820.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_12_16T15_11_27.236820", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T15-11-27.236820.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T15-11-27.236820.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_12_16T15_11_27.236820", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T15-11-27.236820.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T15-11-27.236820.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_12_16T15_11_27.236820", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-12-16T15-11-27.236820.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-12-16T15-11-27.236820.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_12_16T15_11_27.236820", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-12-16T15-11-27.236820.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-12-16T15-11-27.236820.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_12_16T15_11_27.236820", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T15-11-27.236820.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T15-11-27.236820.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_12_16T15_11_27.236820", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-12-16T15-11-27.236820.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-12-16T15-11-27.236820.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_12_16T15_11_27.236820", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T15-11-27.236820.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T15-11-27.236820.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_12_16T15_11_27.236820", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T15-11-27.236820.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T15-11-27.236820.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_12_16T15_11_27.236820", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T15-11-27.236820.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T15-11-27.236820.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_12_16T15_11_27.236820", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-12-16T15-11-27.236820.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-12-16T15-11-27.236820.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_12_16T15_11_27.236820", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T15-11-27.236820.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T15-11-27.236820.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_12_16T15_11_27.236820", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T15-11-27.236820.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T15-11-27.236820.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_12_16T15_11_27.236820", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T15-11-27.236820.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T15-11-27.236820.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_12_16T15_11_27.236820", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T15-11-27.236820.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T15-11-27.236820.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_12_16T15_11_27.236820", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T15-11-27.236820.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T15-11-27.236820.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_12_16T15_11_27.236820", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T15-11-27.236820.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T15-11-27.236820.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_12_16T15_11_27.236820", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T15-11-27.236820.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T15-11-27.236820.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_12_16T15_11_27.236820", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T15-11-27.236820.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T15-11-27.236820.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_12_16T15_11_27.236820", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T15-11-27.236820.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T15-11-27.236820.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_12_16T15_11_27.236820", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T15-11-27.236820.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T15-11-27.236820.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_12_16T15_11_27.236820", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T15-11-27.236820.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T15-11-27.236820.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_12_16T15_11_27.236820", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T15-11-27.236820.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T15-11-27.236820.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_12_16T15_11_27.236820", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T15-11-27.236820.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T15-11-27.236820.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_12_16T15_11_27.236820", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T15-11-27.236820.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T15-11-27.236820.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_12_16T15_11_27.236820", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-12-16T15-11-27.236820.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-12-16T15-11-27.236820.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_12_16T15_11_27.236820", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T15-11-27.236820.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T15-11-27.236820.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_12_16T15_11_27.236820", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-12-16T15-11-27.236820.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-12-16T15-11-27.236820.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_12_16T15_11_27.236820", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T15-11-27.236820.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T15-11-27.236820.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_12_16T15_11_27.236820", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T15-11-27.236820.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T15-11-27.236820.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_12_16T15_11_27.236820", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T15-11-27.236820.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T15-11-27.236820.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_12_16T15_11_27.236820", "path": ["**/details_harness|hendrycksTest-management|5_2023-12-16T15-11-27.236820.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-12-16T15-11-27.236820.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_12_16T15_11_27.236820", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-12-16T15-11-27.236820.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-12-16T15-11-27.236820.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_12_16T15_11_27.236820", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T15-11-27.236820.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T15-11-27.236820.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_12_16T15_11_27.236820", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T15-11-27.236820.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T15-11-27.236820.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_12_16T15_11_27.236820", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T15-11-27.236820.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T15-11-27.236820.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_12_16T15_11_27.236820", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T15-11-27.236820.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T15-11-27.236820.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_12_16T15_11_27.236820", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-12-16T15-11-27.236820.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-12-16T15-11-27.236820.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_12_16T15_11_27.236820", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-12-16T15-11-27.236820.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-12-16T15-11-27.236820.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_12_16T15_11_27.236820", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-12-16T15-11-27.236820.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-12-16T15-11-27.236820.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_12_16T15_11_27.236820", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T15-11-27.236820.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T15-11-27.236820.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_12_16T15_11_27.236820", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-12-16T15-11-27.236820.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2023-12-16T15-11-27.236820.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_12_16T15_11_27.236820", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T15-11-27.236820.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T15-11-27.236820.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_12_16T15_11_27.236820", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T15-11-27.236820.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T15-11-27.236820.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_12_16T15_11_27.236820", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-12-16T15-11-27.236820.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-12-16T15-11-27.236820.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_12_16T15_11_27.236820", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-12-16T15-11-27.236820.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-12-16T15-11-27.236820.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_12_16T15_11_27.236820", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-12-16T15-11-27.236820.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-12-16T15-11-27.236820.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_12_16T15_11_27.236820", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T15-11-27.236820.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T15-11-27.236820.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_12_16T15_11_27.236820", "path": ["**/details_harness|hendrycksTest-virology|5_2023-12-16T15-11-27.236820.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-12-16T15-11-27.236820.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_12_16T15_11_27.236820", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-12-16T15-11-27.236820.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-12-16T15-11-27.236820.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_12_16T15_11_27.236820", "path": ["**/details_harness|truthfulqa:mc|0_2023-12-16T15-11-27.236820.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-12-16T15-11-27.236820.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_12_16T15_11_27.236820", "path": ["**/details_harness|winogrande|5_2023-12-16T15-11-27.236820.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-12-16T15-11-27.236820.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_12_16T15_11_27.236820", "path": ["results_2023-12-16T15-11-27.236820.parquet"]}, {"split": "latest", "path": 
["results_2023-12-16T15-11-27.236820.parquet"]}]}]}
2023-12-16T15:15:00+00:00
[]
[]
# Dataset Card for Evaluation run of GreenNode/GreenNodeLM-7B-v2leo Dataset automatically created during the evaluation run of model GreenNode/GreenNodeLM-7B-v2leo on the Open LLM Leaderboard. The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-12-16T15:11:27.236820(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
[ "# Dataset Card for Evaluation run of GreenNode/GreenNodeLM-7B-v2leo\n\n\n\nDataset automatically created during the evaluation run of model GreenNode/GreenNodeLM-7B-v2leo on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-12-16T15:11:27.236820(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of GreenNode/GreenNodeLM-7B-v2leo\n\n\n\nDataset automatically created during the evaluation run of model GreenNode/GreenNodeLM-7B-v2leo on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-12-16T15:11:27.236820(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 189, 67, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of GreenNode/GreenNodeLM-7B-v2leo\n\n\n\nDataset automatically created during the evaluation run of model GreenNode/GreenNodeLM-7B-v2leo on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-12-16T15:11:27.236820(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]## Dataset Card Authors [optional]## Dataset Card Contact" ]
e35c3853836f9049d91817bdd7fe3a6922f0e32d
# Dataset Card for Evaluation run of luffycodes/vicuna-class-tutor-7b-ep3 <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [luffycodes/vicuna-class-tutor-7b-ep3](https://huggingface.co/luffycodes/vicuna-class-tutor-7b-ep3) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_luffycodes__vicuna-class-tutor-7b-ep3", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-12-16T15:17:26.038006](https://huggingface.co/datasets/open-llm-leaderboard/details_luffycodes__vicuna-class-tutor-7b-ep3/blob/main/results_2023-12-16T15-17-26.038006.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.5093628977197876, "acc_stderr": 0.03403952413237213, "acc_norm": 0.5178445415003057, "acc_norm_stderr": 0.03487617043567181, "mc1": 0.34516523867809057, "mc1_stderr": 0.01664310331927494, "mc2": 0.5229776673336511, "mc2_stderr": 0.01562818115236943 }, "harness|arc:challenge|25": { "acc": 0.4803754266211604, "acc_stderr": 0.01460013207594709, "acc_norm": 0.5213310580204779, "acc_norm_stderr": 0.014598087973127106 }, "harness|hellaswag|10": { "acc": 0.5909181437960566, "acc_stderr": 0.004906595857916761, "acc_norm": 0.7807209719179447, "acc_norm_stderr": 0.004129124597995314 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.25, "acc_stderr": 0.04351941398892446, "acc_norm": 0.25, "acc_norm_stderr": 0.04351941398892446 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.48148148148148145, "acc_stderr": 0.043163785995113245, "acc_norm": 0.48148148148148145, "acc_norm_stderr": 0.043163785995113245 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.4934210526315789, "acc_stderr": 0.040685900502249704, "acc_norm": 0.4934210526315789, "acc_norm_stderr": 0.040685900502249704 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.54, "acc_stderr": 0.05009082659620332, "acc_norm": 0.54, "acc_norm_stderr": 0.05009082659620332 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.5547169811320755, "acc_stderr": 0.03058805297427065, "acc_norm": 0.5547169811320755, "acc_norm_stderr": 0.03058805297427065 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.5, "acc_stderr": 0.04181210050035455, "acc_norm": 0.5, "acc_norm_stderr": 0.04181210050035455 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.35, "acc_stderr": 0.04793724854411019, "acc_norm": 0.35, "acc_norm_stderr": 0.04793724854411019 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.46, "acc_stderr": 0.05009082659620333, "acc_norm": 0.46, "acc_norm_stderr": 0.05009082659620333 }, 
"harness|hendrycksTest-college_mathematics|5": { "acc": 0.37, "acc_stderr": 0.04852365870939099, "acc_norm": 0.37, "acc_norm_stderr": 0.04852365870939099 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.4393063583815029, "acc_stderr": 0.037842719328874674, "acc_norm": 0.4393063583815029, "acc_norm_stderr": 0.037842719328874674 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.17647058823529413, "acc_stderr": 0.0379328118530781, "acc_norm": 0.17647058823529413, "acc_norm_stderr": 0.0379328118530781 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.64, "acc_stderr": 0.04824181513244218, "acc_norm": 0.64, "acc_norm_stderr": 0.04824181513244218 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.4553191489361702, "acc_stderr": 0.03255525359340355, "acc_norm": 0.4553191489361702, "acc_norm_stderr": 0.03255525359340355 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.3333333333333333, "acc_stderr": 0.044346007015849245, "acc_norm": 0.3333333333333333, "acc_norm_stderr": 0.044346007015849245 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.46206896551724136, "acc_stderr": 0.041546596717075474, "acc_norm": 0.46206896551724136, "acc_norm_stderr": 0.041546596717075474 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.29894179894179895, "acc_stderr": 0.023577604791655816, "acc_norm": 0.29894179894179895, "acc_norm_stderr": 0.023577604791655816 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.36507936507936506, "acc_stderr": 0.04306241259127153, "acc_norm": 0.36507936507936506, "acc_norm_stderr": 0.04306241259127153 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.33, "acc_stderr": 0.047258156262526045, "acc_norm": 0.33, "acc_norm_stderr": 0.047258156262526045 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.5645161290322581, "acc_stderr": 0.02820622559150275, "acc_norm": 0.5645161290322581, "acc_norm_stderr": 0.02820622559150275 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.35467980295566504, "acc_stderr": 0.0336612448905145, "acc_norm": 0.35467980295566504, "acc_norm_stderr": 0.0336612448905145 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.45, "acc_stderr": 0.05, "acc_norm": 0.45, "acc_norm_stderr": 0.05 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.6424242424242425, "acc_stderr": 0.037425970438065864, "acc_norm": 0.6424242424242425, "acc_norm_stderr": 0.037425970438065864 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.6363636363636364, "acc_stderr": 0.03427308652999934, "acc_norm": 0.6363636363636364, "acc_norm_stderr": 0.03427308652999934 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.7512953367875648, "acc_stderr": 0.031195840877700286, "acc_norm": 0.7512953367875648, "acc_norm_stderr": 0.031195840877700286 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.4794871794871795, "acc_stderr": 0.025329663163489943, "acc_norm": 0.4794871794871795, "acc_norm_stderr": 0.025329663163489943 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.23333333333333334, "acc_stderr": 0.02578787422095932, "acc_norm": 0.23333333333333334, "acc_norm_stderr": 0.02578787422095932 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.4789915966386555, "acc_stderr": 0.03244980849990029, "acc_norm": 0.4789915966386555, "acc_norm_stderr": 0.03244980849990029 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.304635761589404, "acc_stderr": 0.03757949922943343, "acc_norm": 
0.304635761589404, "acc_norm_stderr": 0.03757949922943343 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.7027522935779816, "acc_stderr": 0.019595707224643526, "acc_norm": 0.7027522935779816, "acc_norm_stderr": 0.019595707224643526 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.4027777777777778, "acc_stderr": 0.03344887382997866, "acc_norm": 0.4027777777777778, "acc_norm_stderr": 0.03344887382997866 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.7205882352941176, "acc_stderr": 0.03149328104507957, "acc_norm": 0.7205882352941176, "acc_norm_stderr": 0.03149328104507957 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.7257383966244726, "acc_stderr": 0.029041333510598025, "acc_norm": 0.7257383966244726, "acc_norm_stderr": 0.029041333510598025 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.6143497757847534, "acc_stderr": 0.03266842214289201, "acc_norm": 0.6143497757847534, "acc_norm_stderr": 0.03266842214289201 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.6259541984732825, "acc_stderr": 0.042438692422305246, "acc_norm": 0.6259541984732825, "acc_norm_stderr": 0.042438692422305246 }, "harness|hendrycksTest-international_law|5": { "acc": 0.6033057851239669, "acc_stderr": 0.044658697805310094, "acc_norm": 0.6033057851239669, "acc_norm_stderr": 0.044658697805310094 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.5833333333333334, "acc_stderr": 0.04766075165356462, "acc_norm": 0.5833333333333334, "acc_norm_stderr": 0.04766075165356462 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.588957055214724, "acc_stderr": 0.038656978537853624, "acc_norm": 0.588957055214724, "acc_norm_stderr": 0.038656978537853624 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.4375, "acc_stderr": 0.04708567521880525, "acc_norm": 0.4375, "acc_norm_stderr": 0.04708567521880525 }, "harness|hendrycksTest-management|5": { "acc": 0.6893203883495146, "acc_stderr": 0.04582124160161549, "acc_norm": 0.6893203883495146, "acc_norm_stderr": 0.04582124160161549 }, "harness|hendrycksTest-marketing|5": { "acc": 0.7606837606837606, "acc_stderr": 0.027951826808924333, "acc_norm": 0.7606837606837606, "acc_norm_stderr": 0.027951826808924333 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.57, "acc_stderr": 0.049756985195624284, "acc_norm": 0.57, "acc_norm_stderr": 0.049756985195624284 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.6781609195402298, "acc_stderr": 0.016706381415057894, "acc_norm": 0.6781609195402298, "acc_norm_stderr": 0.016706381415057894 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.5289017341040463, "acc_stderr": 0.026874085883518348, "acc_norm": 0.5289017341040463, "acc_norm_stderr": 0.026874085883518348 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.24916201117318434, "acc_stderr": 0.014465893829859926, "acc_norm": 0.24916201117318434, "acc_norm_stderr": 0.014465893829859926 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.5751633986928104, "acc_stderr": 0.028304576673141107, "acc_norm": 0.5751633986928104, "acc_norm_stderr": 0.028304576673141107 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.6012861736334405, "acc_stderr": 0.0278093225857745, "acc_norm": 0.6012861736334405, "acc_norm_stderr": 0.0278093225857745 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.6018518518518519, "acc_stderr": 0.027237415094592484, "acc_norm": 0.6018518518518519, "acc_norm_stderr": 0.027237415094592484 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.375886524822695, 
"acc_stderr": 0.028893955412115886, "acc_norm": 0.375886524822695, "acc_norm_stderr": 0.028893955412115886 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.3767926988265971, "acc_stderr": 0.012376459593894402, "acc_norm": 0.3767926988265971, "acc_norm_stderr": 0.012376459593894402 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.5330882352941176, "acc_stderr": 0.030306257722468314, "acc_norm": 0.5330882352941176, "acc_norm_stderr": 0.030306257722468314 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.49019607843137253, "acc_stderr": 0.020223946005074305, "acc_norm": 0.49019607843137253, "acc_norm_stderr": 0.020223946005074305 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6272727272727273, "acc_stderr": 0.04631381319425465, "acc_norm": 0.6272727272727273, "acc_norm_stderr": 0.04631381319425465 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.6204081632653061, "acc_stderr": 0.031067211262872485, "acc_norm": 0.6204081632653061, "acc_norm_stderr": 0.031067211262872485 }, "harness|hendrycksTest-sociology|5": { "acc": 0.6965174129353234, "acc_stderr": 0.032510068164586174, "acc_norm": 0.6965174129353234, "acc_norm_stderr": 0.032510068164586174 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.74, "acc_stderr": 0.04408440022768079, "acc_norm": 0.74, "acc_norm_stderr": 0.04408440022768079 }, "harness|hendrycksTest-virology|5": { "acc": 0.41566265060240964, "acc_stderr": 0.038367221765980515, "acc_norm": 0.41566265060240964, "acc_norm_stderr": 0.038367221765980515 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.7192982456140351, "acc_stderr": 0.034462962170884265, "acc_norm": 0.7192982456140351, "acc_norm_stderr": 0.034462962170884265 }, "harness|truthfulqa:mc|0": { "mc1": 0.34516523867809057, "mc1_stderr": 0.01664310331927494, "mc2": 0.5229776673336511, "mc2_stderr": 0.01562818115236943 }, "harness|winogrande|5": { "acc": 0.7119179163378059, "acc_stderr": 0.012727884724248115 }, "harness|gsm8k|5": { "acc": 0.037149355572403335, "acc_stderr": 0.005209516283073778 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). 
--> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
open-llm-leaderboard/details_luffycodes__vicuna-class-tutor-7b-ep3
[ "region:us" ]
2023-12-16T15:20:21+00:00
{"pretty_name": "Evaluation run of luffycodes/vicuna-class-tutor-7b-ep3", "dataset_summary": "Dataset automatically created during the evaluation run of model [luffycodes/vicuna-class-tutor-7b-ep3](https://huggingface.co/luffycodes/vicuna-class-tutor-7b-ep3) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_luffycodes__vicuna-class-tutor-7b-ep3\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-12-16T15:17:26.038006](https://huggingface.co/datasets/open-llm-leaderboard/details_luffycodes__vicuna-class-tutor-7b-ep3/blob/main/results_2023-12-16T15-17-26.038006.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.5093628977197876,\n \"acc_stderr\": 0.03403952413237213,\n \"acc_norm\": 0.5178445415003057,\n \"acc_norm_stderr\": 0.03487617043567181,\n \"mc1\": 0.34516523867809057,\n \"mc1_stderr\": 0.01664310331927494,\n \"mc2\": 0.5229776673336511,\n \"mc2_stderr\": 0.01562818115236943\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.4803754266211604,\n \"acc_stderr\": 0.01460013207594709,\n \"acc_norm\": 0.5213310580204779,\n \"acc_norm_stderr\": 0.014598087973127106\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.5909181437960566,\n \"acc_stderr\": 0.004906595857916761,\n \"acc_norm\": 0.7807209719179447,\n \"acc_norm_stderr\": 0.004129124597995314\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.25,\n \"acc_stderr\": 0.04351941398892446,\n \"acc_norm\": 0.25,\n \"acc_norm_stderr\": 0.04351941398892446\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.48148148148148145,\n \"acc_stderr\": 0.043163785995113245,\n \"acc_norm\": 0.48148148148148145,\n \"acc_norm_stderr\": 0.043163785995113245\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.4934210526315789,\n \"acc_stderr\": 0.040685900502249704,\n \"acc_norm\": 0.4934210526315789,\n \"acc_norm_stderr\": 0.040685900502249704\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.54,\n \"acc_stderr\": 0.05009082659620332,\n \"acc_norm\": 0.54,\n \"acc_norm_stderr\": 0.05009082659620332\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.5547169811320755,\n \"acc_stderr\": 0.03058805297427065,\n \"acc_norm\": 0.5547169811320755,\n \"acc_norm_stderr\": 0.03058805297427065\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.5,\n \"acc_stderr\": 0.04181210050035455,\n \"acc_norm\": 0.5,\n \"acc_norm_stderr\": 0.04181210050035455\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.35,\n \"acc_stderr\": 0.04793724854411019,\n 
\"acc_norm\": 0.35,\n \"acc_norm_stderr\": 0.04793724854411019\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.46,\n \"acc_stderr\": 0.05009082659620333,\n \"acc_norm\": 0.46,\n \"acc_norm_stderr\": 0.05009082659620333\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.37,\n \"acc_stderr\": 0.04852365870939099,\n \"acc_norm\": 0.37,\n \"acc_norm_stderr\": 0.04852365870939099\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.4393063583815029,\n \"acc_stderr\": 0.037842719328874674,\n \"acc_norm\": 0.4393063583815029,\n \"acc_norm_stderr\": 0.037842719328874674\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.17647058823529413,\n \"acc_stderr\": 0.0379328118530781,\n \"acc_norm\": 0.17647058823529413,\n \"acc_norm_stderr\": 0.0379328118530781\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.64,\n \"acc_stderr\": 0.04824181513244218,\n \"acc_norm\": 0.64,\n \"acc_norm_stderr\": 0.04824181513244218\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.4553191489361702,\n \"acc_stderr\": 0.03255525359340355,\n \"acc_norm\": 0.4553191489361702,\n \"acc_norm_stderr\": 0.03255525359340355\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.3333333333333333,\n \"acc_stderr\": 0.044346007015849245,\n \"acc_norm\": 0.3333333333333333,\n \"acc_norm_stderr\": 0.044346007015849245\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.46206896551724136,\n \"acc_stderr\": 0.041546596717075474,\n \"acc_norm\": 0.46206896551724136,\n \"acc_norm_stderr\": 0.041546596717075474\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.29894179894179895,\n \"acc_stderr\": 0.023577604791655816,\n \"acc_norm\": 0.29894179894179895,\n \"acc_norm_stderr\": 0.023577604791655816\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.36507936507936506,\n \"acc_stderr\": 0.04306241259127153,\n \"acc_norm\": 0.36507936507936506,\n \"acc_norm_stderr\": 0.04306241259127153\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.33,\n \"acc_stderr\": 0.047258156262526045,\n \"acc_norm\": 0.33,\n \"acc_norm_stderr\": 0.047258156262526045\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.5645161290322581,\n \"acc_stderr\": 0.02820622559150275,\n \"acc_norm\": 0.5645161290322581,\n \"acc_norm_stderr\": 0.02820622559150275\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.35467980295566504,\n \"acc_stderr\": 0.0336612448905145,\n \"acc_norm\": 0.35467980295566504,\n \"acc_norm_stderr\": 0.0336612448905145\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.45,\n \"acc_stderr\": 0.05,\n \"acc_norm\": 0.45,\n \"acc_norm_stderr\": 0.05\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.6424242424242425,\n \"acc_stderr\": 0.037425970438065864,\n \"acc_norm\": 0.6424242424242425,\n \"acc_norm_stderr\": 0.037425970438065864\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.6363636363636364,\n \"acc_stderr\": 0.03427308652999934,\n \"acc_norm\": 0.6363636363636364,\n \"acc_norm_stderr\": 0.03427308652999934\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.7512953367875648,\n \"acc_stderr\": 0.031195840877700286,\n \"acc_norm\": 0.7512953367875648,\n \"acc_norm_stderr\": 0.031195840877700286\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.4794871794871795,\n 
\"acc_stderr\": 0.025329663163489943,\n \"acc_norm\": 0.4794871794871795,\n \"acc_norm_stderr\": 0.025329663163489943\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.23333333333333334,\n \"acc_stderr\": 0.02578787422095932,\n \"acc_norm\": 0.23333333333333334,\n \"acc_norm_stderr\": 0.02578787422095932\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.4789915966386555,\n \"acc_stderr\": 0.03244980849990029,\n \"acc_norm\": 0.4789915966386555,\n \"acc_norm_stderr\": 0.03244980849990029\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.304635761589404,\n \"acc_stderr\": 0.03757949922943343,\n \"acc_norm\": 0.304635761589404,\n \"acc_norm_stderr\": 0.03757949922943343\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.7027522935779816,\n \"acc_stderr\": 0.019595707224643526,\n \"acc_norm\": 0.7027522935779816,\n \"acc_norm_stderr\": 0.019595707224643526\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.4027777777777778,\n \"acc_stderr\": 0.03344887382997866,\n \"acc_norm\": 0.4027777777777778,\n \"acc_norm_stderr\": 0.03344887382997866\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.7205882352941176,\n \"acc_stderr\": 0.03149328104507957,\n \"acc_norm\": 0.7205882352941176,\n \"acc_norm_stderr\": 0.03149328104507957\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.7257383966244726,\n \"acc_stderr\": 0.029041333510598025,\n \"acc_norm\": 0.7257383966244726,\n \"acc_norm_stderr\": 0.029041333510598025\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6143497757847534,\n \"acc_stderr\": 0.03266842214289201,\n \"acc_norm\": 0.6143497757847534,\n \"acc_norm_stderr\": 0.03266842214289201\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.6259541984732825,\n \"acc_stderr\": 0.042438692422305246,\n \"acc_norm\": 0.6259541984732825,\n \"acc_norm_stderr\": 0.042438692422305246\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.6033057851239669,\n \"acc_stderr\": 0.044658697805310094,\n \"acc_norm\": 0.6033057851239669,\n \"acc_norm_stderr\": 0.044658697805310094\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.5833333333333334,\n \"acc_stderr\": 0.04766075165356462,\n \"acc_norm\": 0.5833333333333334,\n \"acc_norm_stderr\": 0.04766075165356462\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.588957055214724,\n \"acc_stderr\": 0.038656978537853624,\n \"acc_norm\": 0.588957055214724,\n \"acc_norm_stderr\": 0.038656978537853624\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.4375,\n \"acc_stderr\": 0.04708567521880525,\n \"acc_norm\": 0.4375,\n \"acc_norm_stderr\": 0.04708567521880525\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.6893203883495146,\n \"acc_stderr\": 0.04582124160161549,\n \"acc_norm\": 0.6893203883495146,\n \"acc_norm_stderr\": 0.04582124160161549\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.7606837606837606,\n \"acc_stderr\": 0.027951826808924333,\n \"acc_norm\": 0.7606837606837606,\n \"acc_norm_stderr\": 0.027951826808924333\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.57,\n \"acc_stderr\": 0.049756985195624284,\n \"acc_norm\": 0.57,\n \"acc_norm_stderr\": 0.049756985195624284\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.6781609195402298,\n \"acc_stderr\": 0.016706381415057894,\n \"acc_norm\": 0.6781609195402298,\n \"acc_norm_stderr\": 
0.016706381415057894\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.5289017341040463,\n \"acc_stderr\": 0.026874085883518348,\n \"acc_norm\": 0.5289017341040463,\n \"acc_norm_stderr\": 0.026874085883518348\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.24916201117318434,\n \"acc_stderr\": 0.014465893829859926,\n \"acc_norm\": 0.24916201117318434,\n \"acc_norm_stderr\": 0.014465893829859926\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.5751633986928104,\n \"acc_stderr\": 0.028304576673141107,\n \"acc_norm\": 0.5751633986928104,\n \"acc_norm_stderr\": 0.028304576673141107\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.6012861736334405,\n \"acc_stderr\": 0.0278093225857745,\n \"acc_norm\": 0.6012861736334405,\n \"acc_norm_stderr\": 0.0278093225857745\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.6018518518518519,\n \"acc_stderr\": 0.027237415094592484,\n \"acc_norm\": 0.6018518518518519,\n \"acc_norm_stderr\": 0.027237415094592484\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.375886524822695,\n \"acc_stderr\": 0.028893955412115886,\n \"acc_norm\": 0.375886524822695,\n \"acc_norm_stderr\": 0.028893955412115886\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.3767926988265971,\n \"acc_stderr\": 0.012376459593894402,\n \"acc_norm\": 0.3767926988265971,\n \"acc_norm_stderr\": 0.012376459593894402\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.5330882352941176,\n \"acc_stderr\": 0.030306257722468314,\n \"acc_norm\": 0.5330882352941176,\n \"acc_norm_stderr\": 0.030306257722468314\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.49019607843137253,\n \"acc_stderr\": 0.020223946005074305,\n \"acc_norm\": 0.49019607843137253,\n \"acc_norm_stderr\": 0.020223946005074305\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6272727272727273,\n \"acc_stderr\": 0.04631381319425465,\n \"acc_norm\": 0.6272727272727273,\n \"acc_norm_stderr\": 0.04631381319425465\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.6204081632653061,\n \"acc_stderr\": 0.031067211262872485,\n \"acc_norm\": 0.6204081632653061,\n \"acc_norm_stderr\": 0.031067211262872485\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.6965174129353234,\n \"acc_stderr\": 0.032510068164586174,\n \"acc_norm\": 0.6965174129353234,\n \"acc_norm_stderr\": 0.032510068164586174\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.74,\n \"acc_stderr\": 0.04408440022768079,\n \"acc_norm\": 0.74,\n \"acc_norm_stderr\": 0.04408440022768079\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.41566265060240964,\n \"acc_stderr\": 0.038367221765980515,\n \"acc_norm\": 0.41566265060240964,\n \"acc_norm_stderr\": 0.038367221765980515\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.7192982456140351,\n \"acc_stderr\": 0.034462962170884265,\n \"acc_norm\": 0.7192982456140351,\n \"acc_norm_stderr\": 0.034462962170884265\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.34516523867809057,\n \"mc1_stderr\": 0.01664310331927494,\n \"mc2\": 0.5229776673336511,\n \"mc2_stderr\": 0.01562818115236943\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7119179163378059,\n \"acc_stderr\": 0.012727884724248115\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.037149355572403335,\n \"acc_stderr\": 0.005209516283073778\n }\n}\n```", "repo_url": "https://huggingface.co/luffycodes/vicuna-class-tutor-7b-ep3", "leaderboard_url": 
"https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_12_16T15_17_26.038006", "path": ["**/details_harness|arc:challenge|25_2023-12-16T15-17-26.038006.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-12-16T15-17-26.038006.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_12_16T15_17_26.038006", "path": ["**/details_harness|gsm8k|5_2023-12-16T15-17-26.038006.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-12-16T15-17-26.038006.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_12_16T15_17_26.038006", "path": ["**/details_harness|hellaswag|10_2023-12-16T15-17-26.038006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-12-16T15-17-26.038006.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_12_16T15_17_26.038006", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T15-17-26.038006.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-12-16T15-17-26.038006.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-12-16T15-17-26.038006.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T15-17-26.038006.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T15-17-26.038006.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-12-16T15-17-26.038006.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T15-17-26.038006.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T15-17-26.038006.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T15-17-26.038006.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T15-17-26.038006.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-12-16T15-17-26.038006.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-12-16T15-17-26.038006.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T15-17-26.038006.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-12-16T15-17-26.038006.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T15-17-26.038006.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T15-17-26.038006.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T15-17-26.038006.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-12-16T15-17-26.038006.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T15-17-26.038006.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T15-17-26.038006.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T15-17-26.038006.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T15-17-26.038006.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T15-17-26.038006.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T15-17-26.038006.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T15-17-26.038006.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T15-17-26.038006.parquet", 
"**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T15-17-26.038006.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T15-17-26.038006.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T15-17-26.038006.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T15-17-26.038006.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T15-17-26.038006.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T15-17-26.038006.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-12-16T15-17-26.038006.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T15-17-26.038006.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-12-16T15-17-26.038006.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T15-17-26.038006.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T15-17-26.038006.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T15-17-26.038006.parquet", "**/details_harness|hendrycksTest-management|5_2023-12-16T15-17-26.038006.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-12-16T15-17-26.038006.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T15-17-26.038006.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T15-17-26.038006.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T15-17-26.038006.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T15-17-26.038006.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-12-16T15-17-26.038006.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-12-16T15-17-26.038006.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-12-16T15-17-26.038006.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T15-17-26.038006.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-12-16T15-17-26.038006.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T15-17-26.038006.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T15-17-26.038006.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-12-16T15-17-26.038006.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-12-16T15-17-26.038006.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-12-16T15-17-26.038006.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T15-17-26.038006.parquet", "**/details_harness|hendrycksTest-virology|5_2023-12-16T15-17-26.038006.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-12-16T15-17-26.038006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T15-17-26.038006.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-12-16T15-17-26.038006.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-12-16T15-17-26.038006.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T15-17-26.038006.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T15-17-26.038006.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-12-16T15-17-26.038006.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T15-17-26.038006.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T15-17-26.038006.parquet", 
"**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T15-17-26.038006.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T15-17-26.038006.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-12-16T15-17-26.038006.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-12-16T15-17-26.038006.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T15-17-26.038006.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-12-16T15-17-26.038006.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T15-17-26.038006.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T15-17-26.038006.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T15-17-26.038006.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-12-16T15-17-26.038006.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T15-17-26.038006.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T15-17-26.038006.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T15-17-26.038006.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T15-17-26.038006.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T15-17-26.038006.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T15-17-26.038006.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T15-17-26.038006.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T15-17-26.038006.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T15-17-26.038006.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T15-17-26.038006.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T15-17-26.038006.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T15-17-26.038006.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T15-17-26.038006.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T15-17-26.038006.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-12-16T15-17-26.038006.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T15-17-26.038006.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-12-16T15-17-26.038006.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T15-17-26.038006.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T15-17-26.038006.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T15-17-26.038006.parquet", "**/details_harness|hendrycksTest-management|5_2023-12-16T15-17-26.038006.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-12-16T15-17-26.038006.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T15-17-26.038006.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T15-17-26.038006.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T15-17-26.038006.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T15-17-26.038006.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-12-16T15-17-26.038006.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-12-16T15-17-26.038006.parquet", 
"**/details_harness|hendrycksTest-prehistory|5_2023-12-16T15-17-26.038006.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T15-17-26.038006.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-12-16T15-17-26.038006.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T15-17-26.038006.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T15-17-26.038006.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-12-16T15-17-26.038006.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-12-16T15-17-26.038006.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-12-16T15-17-26.038006.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T15-17-26.038006.parquet", "**/details_harness|hendrycksTest-virology|5_2023-12-16T15-17-26.038006.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-12-16T15-17-26.038006.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_12_16T15_17_26.038006", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T15-17-26.038006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T15-17-26.038006.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_12_16T15_17_26.038006", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-12-16T15-17-26.038006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-12-16T15-17-26.038006.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_12_16T15_17_26.038006", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-12-16T15-17-26.038006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-12-16T15-17-26.038006.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_12_16T15_17_26.038006", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T15-17-26.038006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T15-17-26.038006.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_12_16T15_17_26.038006", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T15-17-26.038006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T15-17-26.038006.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_12_16T15_17_26.038006", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-12-16T15-17-26.038006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-12-16T15-17-26.038006.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_12_16T15_17_26.038006", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T15-17-26.038006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T15-17-26.038006.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_12_16T15_17_26.038006", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T15-17-26.038006.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T15-17-26.038006.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_12_16T15_17_26.038006", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T15-17-26.038006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T15-17-26.038006.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_12_16T15_17_26.038006", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T15-17-26.038006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T15-17-26.038006.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_12_16T15_17_26.038006", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-12-16T15-17-26.038006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-12-16T15-17-26.038006.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_12_16T15_17_26.038006", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-12-16T15-17-26.038006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-12-16T15-17-26.038006.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_12_16T15_17_26.038006", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T15-17-26.038006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T15-17-26.038006.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_12_16T15_17_26.038006", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-12-16T15-17-26.038006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-12-16T15-17-26.038006.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_12_16T15_17_26.038006", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T15-17-26.038006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T15-17-26.038006.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_12_16T15_17_26.038006", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T15-17-26.038006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T15-17-26.038006.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_12_16T15_17_26.038006", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T15-17-26.038006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T15-17-26.038006.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_12_16T15_17_26.038006", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-12-16T15-17-26.038006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-12-16T15-17-26.038006.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_12_16T15_17_26.038006", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T15-17-26.038006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T15-17-26.038006.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_12_16T15_17_26.038006", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T15-17-26.038006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T15-17-26.038006.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_12_16T15_17_26.038006", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T15-17-26.038006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T15-17-26.038006.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_12_16T15_17_26.038006", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T15-17-26.038006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T15-17-26.038006.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_12_16T15_17_26.038006", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T15-17-26.038006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T15-17-26.038006.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_12_16T15_17_26.038006", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T15-17-26.038006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T15-17-26.038006.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_12_16T15_17_26.038006", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T15-17-26.038006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T15-17-26.038006.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_12_16T15_17_26.038006", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T15-17-26.038006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T15-17-26.038006.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_12_16T15_17_26.038006", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T15-17-26.038006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T15-17-26.038006.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_12_16T15_17_26.038006", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T15-17-26.038006.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T15-17-26.038006.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_12_16T15_17_26.038006", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T15-17-26.038006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T15-17-26.038006.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_12_16T15_17_26.038006", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T15-17-26.038006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T15-17-26.038006.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_12_16T15_17_26.038006", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T15-17-26.038006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T15-17-26.038006.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_12_16T15_17_26.038006", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T15-17-26.038006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T15-17-26.038006.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_12_16T15_17_26.038006", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-12-16T15-17-26.038006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-12-16T15-17-26.038006.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_12_16T15_17_26.038006", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T15-17-26.038006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T15-17-26.038006.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_12_16T15_17_26.038006", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-12-16T15-17-26.038006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-12-16T15-17-26.038006.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_12_16T15_17_26.038006", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T15-17-26.038006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T15-17-26.038006.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_12_16T15_17_26.038006", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T15-17-26.038006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T15-17-26.038006.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_12_16T15_17_26.038006", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T15-17-26.038006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T15-17-26.038006.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_12_16T15_17_26.038006", "path": ["**/details_harness|hendrycksTest-management|5_2023-12-16T15-17-26.038006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-12-16T15-17-26.038006.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_12_16T15_17_26.038006", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-12-16T15-17-26.038006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-12-16T15-17-26.038006.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_12_16T15_17_26.038006", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T15-17-26.038006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T15-17-26.038006.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_12_16T15_17_26.038006", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T15-17-26.038006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T15-17-26.038006.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_12_16T15_17_26.038006", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T15-17-26.038006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T15-17-26.038006.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_12_16T15_17_26.038006", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T15-17-26.038006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T15-17-26.038006.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_12_16T15_17_26.038006", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-12-16T15-17-26.038006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-12-16T15-17-26.038006.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_12_16T15_17_26.038006", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-12-16T15-17-26.038006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-12-16T15-17-26.038006.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_12_16T15_17_26.038006", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-12-16T15-17-26.038006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-12-16T15-17-26.038006.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_12_16T15_17_26.038006", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T15-17-26.038006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T15-17-26.038006.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_12_16T15_17_26.038006", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-12-16T15-17-26.038006.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2023-12-16T15-17-26.038006.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_12_16T15_17_26.038006", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T15-17-26.038006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T15-17-26.038006.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_12_16T15_17_26.038006", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T15-17-26.038006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T15-17-26.038006.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_12_16T15_17_26.038006", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-12-16T15-17-26.038006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-12-16T15-17-26.038006.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_12_16T15_17_26.038006", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-12-16T15-17-26.038006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-12-16T15-17-26.038006.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_12_16T15_17_26.038006", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-12-16T15-17-26.038006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-12-16T15-17-26.038006.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_12_16T15_17_26.038006", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T15-17-26.038006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T15-17-26.038006.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_12_16T15_17_26.038006", "path": ["**/details_harness|hendrycksTest-virology|5_2023-12-16T15-17-26.038006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-12-16T15-17-26.038006.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_12_16T15_17_26.038006", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-12-16T15-17-26.038006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-12-16T15-17-26.038006.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_12_16T15_17_26.038006", "path": ["**/details_harness|truthfulqa:mc|0_2023-12-16T15-17-26.038006.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-12-16T15-17-26.038006.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_12_16T15_17_26.038006", "path": ["**/details_harness|winogrande|5_2023-12-16T15-17-26.038006.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-12-16T15-17-26.038006.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_12_16T15_17_26.038006", "path": ["results_2023-12-16T15-17-26.038006.parquet"]}, {"split": "latest", "path": 
["results_2023-12-16T15-17-26.038006.parquet"]}]}]}
2023-12-16T15:21:04+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of luffycodes/vicuna-class-tutor-7b-ep3 Dataset automatically created during the evaluation run of model luffycodes/vicuna-class-tutor-7b-ep3 on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-12-16T15:17:26.038006 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases, and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
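The processed card text above stops at "you can for instance do the following:" because its code snippet was dropped. A minimal sketch of what such a call looks like, following the pattern used for the other runs in this dump; the repository id is assumed from the details_<org>__<model> convention and the config and split names are taken from the metadata listed above:

```python
from datasets import load_dataset

# Hypothetical reconstruction of the dropped snippet; only the config and split
# names come from this record's metadata, the repository id is assumed.
data = load_dataset(
    "open-llm-leaderboard/details_luffycodes__vicuna-class-tutor-7b-ep3",
    "harness_winogrande_5",
    split="latest",
)
print(data)
```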
[ "# Dataset Card for Evaluation run of luffycodes/vicuna-class-tutor-7b-ep3\n\n\n\nDataset automatically created during the evaluation run of model luffycodes/vicuna-class-tutor-7b-ep3 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-12-16T15:17:26.038006(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of luffycodes/vicuna-class-tutor-7b-ep3\n\n\n\nDataset automatically created during the evaluation run of model luffycodes/vicuna-class-tutor-7b-ep3 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-12-16T15:17:26.038006(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 195, 67, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of luffycodes/vicuna-class-tutor-7b-ep3\n\n\n\nDataset automatically created during the evaluation run of model luffycodes/vicuna-class-tutor-7b-ep3 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-12-16T15:17:26.038006(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]" ]
24761c537797fbd9908ee53194e1d2f1fbcbd565
# Dataset Card for Evaluation run of mncai/agiin-11.1B-v0.0 <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [mncai/agiin-11.1B-v0.0](https://huggingface.co/mncai/agiin-11.1B-v0.0) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_mncai__agiin-11.1B-v0.0", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-12-16T15:20:44.774696](https://huggingface.co/datasets/open-llm-leaderboard/details_mncai__agiin-11.1B-v0.0/blob/main/results_2023-12-16T15-20-44.774696.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.6488802179534787, "acc_stderr": 0.0321079939080906, "acc_norm": 0.6539001040951737, "acc_norm_stderr": 0.03274282477493687, "mc1": 0.5104039167686658, "mc1_stderr": 0.017499711430249268, "mc2": 0.6767350158422528, "mc2_stderr": 0.015433642831645542 }, "harness|arc:challenge|25": { "acc": 0.6245733788395904, "acc_stderr": 0.014150631435111726, "acc_norm": 0.6732081911262798, "acc_norm_stderr": 0.013706665975587333 }, "harness|hellaswag|10": { "acc": 0.6907986456881099, "acc_stderr": 0.004612198061600092, "acc_norm": 0.8634734116709819, "acc_norm_stderr": 0.003426451744507847 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.34, "acc_stderr": 0.047609522856952365, "acc_norm": 0.34, "acc_norm_stderr": 0.047609522856952365 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.6074074074074074, "acc_stderr": 0.0421850621536888, "acc_norm": 0.6074074074074074, "acc_norm_stderr": 0.0421850621536888 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.7039473684210527, "acc_stderr": 0.03715062154998904, "acc_norm": 0.7039473684210527, "acc_norm_stderr": 0.03715062154998904 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.64, "acc_stderr": 0.04824181513244218, "acc_norm": 0.64, "acc_norm_stderr": 0.04824181513244218 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.7094339622641509, "acc_stderr": 0.027943219989337142, "acc_norm": 0.7094339622641509, "acc_norm_stderr": 0.027943219989337142 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.7638888888888888, "acc_stderr": 0.03551446610810826, "acc_norm": 0.7638888888888888, "acc_norm_stderr": 0.03551446610810826 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.48, "acc_stderr": 0.050211673156867795, "acc_norm": 0.48, "acc_norm_stderr": 0.050211673156867795 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.54, "acc_stderr": 0.05009082659620333, "acc_norm": 0.54, "acc_norm_stderr": 0.05009082659620333 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.31, 
"acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.6473988439306358, "acc_stderr": 0.036430371689585475, "acc_norm": 0.6473988439306358, "acc_norm_stderr": 0.036430371689585475 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.43137254901960786, "acc_stderr": 0.04928099597287534, "acc_norm": 0.43137254901960786, "acc_norm_stderr": 0.04928099597287534 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.76, "acc_stderr": 0.042923469599092816, "acc_norm": 0.76, "acc_norm_stderr": 0.042923469599092816 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.5659574468085107, "acc_stderr": 0.03240038086792747, "acc_norm": 0.5659574468085107, "acc_norm_stderr": 0.03240038086792747 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.5087719298245614, "acc_stderr": 0.04702880432049615, "acc_norm": 0.5087719298245614, "acc_norm_stderr": 0.04702880432049615 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.6, "acc_stderr": 0.040824829046386284, "acc_norm": 0.6, "acc_norm_stderr": 0.040824829046386284 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.43386243386243384, "acc_stderr": 0.025525034382474894, "acc_norm": 0.43386243386243384, "acc_norm_stderr": 0.025525034382474894 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.4444444444444444, "acc_stderr": 0.04444444444444449, "acc_norm": 0.4444444444444444, "acc_norm_stderr": 0.04444444444444449 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.38, "acc_stderr": 0.04878317312145633, "acc_norm": 0.38, "acc_norm_stderr": 0.04878317312145633 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.7806451612903226, "acc_stderr": 0.023540799358723295, "acc_norm": 0.7806451612903226, "acc_norm_stderr": 0.023540799358723295 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.5073891625615764, "acc_stderr": 0.035176035403610105, "acc_norm": 0.5073891625615764, "acc_norm_stderr": 0.035176035403610105 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.7, "acc_stderr": 0.046056618647183814, "acc_norm": 0.7, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.7757575757575758, "acc_stderr": 0.03256866661681102, "acc_norm": 0.7757575757575758, "acc_norm_stderr": 0.03256866661681102 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.803030303030303, "acc_stderr": 0.028335609732463362, "acc_norm": 0.803030303030303, "acc_norm_stderr": 0.028335609732463362 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.8860103626943006, "acc_stderr": 0.022935144053919443, "acc_norm": 0.8860103626943006, "acc_norm_stderr": 0.022935144053919443 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.676923076923077, "acc_stderr": 0.02371088850197057, "acc_norm": 0.676923076923077, "acc_norm_stderr": 0.02371088850197057 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.34814814814814815, "acc_stderr": 0.029045600290616255, "acc_norm": 0.34814814814814815, "acc_norm_stderr": 0.029045600290616255 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.7184873949579832, "acc_stderr": 0.029213549414372177, "acc_norm": 0.7184873949579832, "acc_norm_stderr": 0.029213549414372177 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.3443708609271523, "acc_stderr": 0.038796870240733264, "acc_norm": 0.3443708609271523, "acc_norm_stderr": 0.038796870240733264 }, 
"harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8440366972477065, "acc_stderr": 0.015555802713590172, "acc_norm": 0.8440366972477065, "acc_norm_stderr": 0.015555802713590172 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.5555555555555556, "acc_stderr": 0.03388857118502325, "acc_norm": 0.5555555555555556, "acc_norm_stderr": 0.03388857118502325 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.8431372549019608, "acc_stderr": 0.025524722324553346, "acc_norm": 0.8431372549019608, "acc_norm_stderr": 0.025524722324553346 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.8143459915611815, "acc_stderr": 0.025310495376944853, "acc_norm": 0.8143459915611815, "acc_norm_stderr": 0.025310495376944853 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.6995515695067265, "acc_stderr": 0.030769352008229143, "acc_norm": 0.6995515695067265, "acc_norm_stderr": 0.030769352008229143 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.7862595419847328, "acc_stderr": 0.0359546161177469, "acc_norm": 0.7862595419847328, "acc_norm_stderr": 0.0359546161177469 }, "harness|hendrycksTest-international_law|5": { "acc": 0.7520661157024794, "acc_stderr": 0.03941897526516302, "acc_norm": 0.7520661157024794, "acc_norm_stderr": 0.03941897526516302 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.8148148148148148, "acc_stderr": 0.03755265865037181, "acc_norm": 0.8148148148148148, "acc_norm_stderr": 0.03755265865037181 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.7791411042944786, "acc_stderr": 0.03259177392742178, "acc_norm": 0.7791411042944786, "acc_norm_stderr": 0.03259177392742178 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.5089285714285714, "acc_stderr": 0.04745033255489123, "acc_norm": 0.5089285714285714, "acc_norm_stderr": 0.04745033255489123 }, "harness|hendrycksTest-management|5": { "acc": 0.7378640776699029, "acc_stderr": 0.043546310772605956, "acc_norm": 0.7378640776699029, "acc_norm_stderr": 0.043546310772605956 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8589743589743589, "acc_stderr": 0.022801382534597528, "acc_norm": 0.8589743589743589, "acc_norm_stderr": 0.022801382534597528 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.72, "acc_stderr": 0.04512608598542128, "acc_norm": 0.72, "acc_norm_stderr": 0.04512608598542128 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.8212005108556832, "acc_stderr": 0.013702643715368985, "acc_norm": 0.8212005108556832, "acc_norm_stderr": 0.013702643715368985 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.7196531791907514, "acc_stderr": 0.024182427496577615, "acc_norm": 0.7196531791907514, "acc_norm_stderr": 0.024182427496577615 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.4011173184357542, "acc_stderr": 0.016392221899407082, "acc_norm": 0.4011173184357542, "acc_norm_stderr": 0.016392221899407082 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.7222222222222222, "acc_stderr": 0.025646863097137897, "acc_norm": 0.7222222222222222, "acc_norm_stderr": 0.025646863097137897 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.7234726688102894, "acc_stderr": 0.02540383297817961, "acc_norm": 0.7234726688102894, "acc_norm_stderr": 0.02540383297817961 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.7407407407407407, "acc_stderr": 0.024383665531035457, "acc_norm": 0.7407407407407407, "acc_norm_stderr": 0.024383665531035457 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.4787234042553192, "acc_stderr": 0.029800481645628693, 
"acc_norm": 0.4787234042553192, "acc_norm_stderr": 0.029800481645628693 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.4641460234680574, "acc_stderr": 0.012737361318730583, "acc_norm": 0.4641460234680574, "acc_norm_stderr": 0.012737361318730583 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.6727941176470589, "acc_stderr": 0.02850145286039656, "acc_norm": 0.6727941176470589, "acc_norm_stderr": 0.02850145286039656 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.6683006535947712, "acc_stderr": 0.019047485239360378, "acc_norm": 0.6683006535947712, "acc_norm_stderr": 0.019047485239360378 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6909090909090909, "acc_stderr": 0.044262946482000985, "acc_norm": 0.6909090909090909, "acc_norm_stderr": 0.044262946482000985 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.7061224489795919, "acc_stderr": 0.02916273841024977, "acc_norm": 0.7061224489795919, "acc_norm_stderr": 0.02916273841024977 }, "harness|hendrycksTest-sociology|5": { "acc": 0.8557213930348259, "acc_stderr": 0.024845753212306046, "acc_norm": 0.8557213930348259, "acc_norm_stderr": 0.024845753212306046 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.89, "acc_stderr": 0.03144660377352203, "acc_norm": 0.89, "acc_norm_stderr": 0.03144660377352203 }, "harness|hendrycksTest-virology|5": { "acc": 0.5301204819277109, "acc_stderr": 0.03885425420866767, "acc_norm": 0.5301204819277109, "acc_norm_stderr": 0.03885425420866767 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8362573099415205, "acc_stderr": 0.028380919596145866, "acc_norm": 0.8362573099415205, "acc_norm_stderr": 0.028380919596145866 }, "harness|truthfulqa:mc|0": { "mc1": 0.5104039167686658, "mc1_stderr": 0.017499711430249268, "mc2": 0.6767350158422528, "mc2_stderr": 0.015433642831645542 }, "harness|winogrande|5": { "acc": 0.7884767166535123, "acc_stderr": 0.01147774768422318 }, "harness|gsm8k|5": { "acc": 0.43442001516300227, "acc_stderr": 0.013653507211411406 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). 
--> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
open-llm-leaderboard/details_mncai__agiin-11.1B-v0.0
[ "region:us" ]
2023-12-16T15:23:39+00:00
{"pretty_name": "Evaluation run of mncai/agiin-11.1B-v0.0", "dataset_summary": "Dataset automatically created during the evaluation run of model [mncai/agiin-11.1B-v0.0](https://huggingface.co/mncai/agiin-11.1B-v0.0) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_mncai__agiin-11.1B-v0.0\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-12-16T15:20:44.774696](https://huggingface.co/datasets/open-llm-leaderboard/details_mncai__agiin-11.1B-v0.0/blob/main/results_2023-12-16T15-20-44.774696.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6488802179534787,\n \"acc_stderr\": 0.0321079939080906,\n \"acc_norm\": 0.6539001040951737,\n \"acc_norm_stderr\": 0.03274282477493687,\n \"mc1\": 0.5104039167686658,\n \"mc1_stderr\": 0.017499711430249268,\n \"mc2\": 0.6767350158422528,\n \"mc2_stderr\": 0.015433642831645542\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.6245733788395904,\n \"acc_stderr\": 0.014150631435111726,\n \"acc_norm\": 0.6732081911262798,\n \"acc_norm_stderr\": 0.013706665975587333\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6907986456881099,\n \"acc_stderr\": 0.004612198061600092,\n \"acc_norm\": 0.8634734116709819,\n \"acc_norm_stderr\": 0.003426451744507847\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.34,\n \"acc_stderr\": 0.047609522856952365,\n \"acc_norm\": 0.34,\n \"acc_norm_stderr\": 0.047609522856952365\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6074074074074074,\n \"acc_stderr\": 0.0421850621536888,\n \"acc_norm\": 0.6074074074074074,\n \"acc_norm_stderr\": 0.0421850621536888\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.7039473684210527,\n \"acc_stderr\": 0.03715062154998904,\n \"acc_norm\": 0.7039473684210527,\n \"acc_norm_stderr\": 0.03715062154998904\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.64,\n \"acc_stderr\": 0.04824181513244218,\n \"acc_norm\": 0.64,\n \"acc_norm_stderr\": 0.04824181513244218\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.7094339622641509,\n \"acc_stderr\": 0.027943219989337142,\n \"acc_norm\": 0.7094339622641509,\n \"acc_norm_stderr\": 0.027943219989337142\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7638888888888888,\n \"acc_stderr\": 0.03551446610810826,\n \"acc_norm\": 0.7638888888888888,\n \"acc_norm_stderr\": 0.03551446610810826\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.48,\n \"acc_stderr\": 0.050211673156867795,\n \"acc_norm\": 0.48,\n \"acc_norm_stderr\": 
0.050211673156867795\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.54,\n \"acc_stderr\": 0.05009082659620333,\n \"acc_norm\": 0.54,\n \"acc_norm_stderr\": 0.05009082659620333\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.31,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.31,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6473988439306358,\n \"acc_stderr\": 0.036430371689585475,\n \"acc_norm\": 0.6473988439306358,\n \"acc_norm_stderr\": 0.036430371689585475\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.43137254901960786,\n \"acc_stderr\": 0.04928099597287534,\n \"acc_norm\": 0.43137254901960786,\n \"acc_norm_stderr\": 0.04928099597287534\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.76,\n \"acc_stderr\": 0.042923469599092816,\n \"acc_norm\": 0.76,\n \"acc_norm_stderr\": 0.042923469599092816\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.5659574468085107,\n \"acc_stderr\": 0.03240038086792747,\n \"acc_norm\": 0.5659574468085107,\n \"acc_norm_stderr\": 0.03240038086792747\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.5087719298245614,\n \"acc_stderr\": 0.04702880432049615,\n \"acc_norm\": 0.5087719298245614,\n \"acc_norm_stderr\": 0.04702880432049615\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.6,\n \"acc_stderr\": 0.040824829046386284,\n \"acc_norm\": 0.6,\n \"acc_norm_stderr\": 0.040824829046386284\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.43386243386243384,\n \"acc_stderr\": 0.025525034382474894,\n \"acc_norm\": 0.43386243386243384,\n \"acc_norm_stderr\": 0.025525034382474894\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.4444444444444444,\n \"acc_stderr\": 0.04444444444444449,\n \"acc_norm\": 0.4444444444444444,\n \"acc_norm_stderr\": 0.04444444444444449\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.38,\n \"acc_stderr\": 0.04878317312145633,\n \"acc_norm\": 0.38,\n \"acc_norm_stderr\": 0.04878317312145633\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7806451612903226,\n \"acc_stderr\": 0.023540799358723295,\n \"acc_norm\": 0.7806451612903226,\n \"acc_norm_stderr\": 0.023540799358723295\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.5073891625615764,\n \"acc_stderr\": 0.035176035403610105,\n \"acc_norm\": 0.5073891625615764,\n \"acc_norm_stderr\": 0.035176035403610105\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.7,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.7,\n \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7757575757575758,\n \"acc_stderr\": 0.03256866661681102,\n \"acc_norm\": 0.7757575757575758,\n \"acc_norm_stderr\": 0.03256866661681102\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.803030303030303,\n \"acc_stderr\": 0.028335609732463362,\n \"acc_norm\": 0.803030303030303,\n \"acc_norm_stderr\": 0.028335609732463362\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.8860103626943006,\n \"acc_stderr\": 0.022935144053919443,\n \"acc_norm\": 0.8860103626943006,\n \"acc_norm_stderr\": 0.022935144053919443\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.676923076923077,\n \"acc_stderr\": 0.02371088850197057,\n 
\"acc_norm\": 0.676923076923077,\n \"acc_norm_stderr\": 0.02371088850197057\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.34814814814814815,\n \"acc_stderr\": 0.029045600290616255,\n \"acc_norm\": 0.34814814814814815,\n \"acc_norm_stderr\": 0.029045600290616255\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.7184873949579832,\n \"acc_stderr\": 0.029213549414372177,\n \"acc_norm\": 0.7184873949579832,\n \"acc_norm_stderr\": 0.029213549414372177\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.3443708609271523,\n \"acc_stderr\": 0.038796870240733264,\n \"acc_norm\": 0.3443708609271523,\n \"acc_norm_stderr\": 0.038796870240733264\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8440366972477065,\n \"acc_stderr\": 0.015555802713590172,\n \"acc_norm\": 0.8440366972477065,\n \"acc_norm_stderr\": 0.015555802713590172\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.5555555555555556,\n \"acc_stderr\": 0.03388857118502325,\n \"acc_norm\": 0.5555555555555556,\n \"acc_norm_stderr\": 0.03388857118502325\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.8431372549019608,\n \"acc_stderr\": 0.025524722324553346,\n \"acc_norm\": 0.8431372549019608,\n \"acc_norm_stderr\": 0.025524722324553346\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.8143459915611815,\n \"acc_stderr\": 0.025310495376944853,\n \"acc_norm\": 0.8143459915611815,\n \"acc_norm_stderr\": 0.025310495376944853\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6995515695067265,\n \"acc_stderr\": 0.030769352008229143,\n \"acc_norm\": 0.6995515695067265,\n \"acc_norm_stderr\": 0.030769352008229143\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.7862595419847328,\n \"acc_stderr\": 0.0359546161177469,\n \"acc_norm\": 0.7862595419847328,\n \"acc_norm_stderr\": 0.0359546161177469\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.7520661157024794,\n \"acc_stderr\": 0.03941897526516302,\n \"acc_norm\": 0.7520661157024794,\n \"acc_norm_stderr\": 0.03941897526516302\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.8148148148148148,\n \"acc_stderr\": 0.03755265865037181,\n \"acc_norm\": 0.8148148148148148,\n \"acc_norm_stderr\": 0.03755265865037181\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7791411042944786,\n \"acc_stderr\": 0.03259177392742178,\n \"acc_norm\": 0.7791411042944786,\n \"acc_norm_stderr\": 0.03259177392742178\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.5089285714285714,\n \"acc_stderr\": 0.04745033255489123,\n \"acc_norm\": 0.5089285714285714,\n \"acc_norm_stderr\": 0.04745033255489123\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7378640776699029,\n \"acc_stderr\": 0.043546310772605956,\n \"acc_norm\": 0.7378640776699029,\n \"acc_norm_stderr\": 0.043546310772605956\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8589743589743589,\n \"acc_stderr\": 0.022801382534597528,\n \"acc_norm\": 0.8589743589743589,\n \"acc_norm_stderr\": 0.022801382534597528\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.72,\n \"acc_stderr\": 0.04512608598542128,\n \"acc_norm\": 0.72,\n \"acc_norm_stderr\": 0.04512608598542128\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.8212005108556832,\n \"acc_stderr\": 0.013702643715368985,\n \"acc_norm\": 0.8212005108556832,\n \"acc_norm_stderr\": 
0.013702643715368985\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.7196531791907514,\n \"acc_stderr\": 0.024182427496577615,\n \"acc_norm\": 0.7196531791907514,\n \"acc_norm_stderr\": 0.024182427496577615\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.4011173184357542,\n \"acc_stderr\": 0.016392221899407082,\n \"acc_norm\": 0.4011173184357542,\n \"acc_norm_stderr\": 0.016392221899407082\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.7222222222222222,\n \"acc_stderr\": 0.025646863097137897,\n \"acc_norm\": 0.7222222222222222,\n \"acc_norm_stderr\": 0.025646863097137897\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.7234726688102894,\n \"acc_stderr\": 0.02540383297817961,\n \"acc_norm\": 0.7234726688102894,\n \"acc_norm_stderr\": 0.02540383297817961\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.7407407407407407,\n \"acc_stderr\": 0.024383665531035457,\n \"acc_norm\": 0.7407407407407407,\n \"acc_norm_stderr\": 0.024383665531035457\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.4787234042553192,\n \"acc_stderr\": 0.029800481645628693,\n \"acc_norm\": 0.4787234042553192,\n \"acc_norm_stderr\": 0.029800481645628693\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.4641460234680574,\n \"acc_stderr\": 0.012737361318730583,\n \"acc_norm\": 0.4641460234680574,\n \"acc_norm_stderr\": 0.012737361318730583\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.6727941176470589,\n \"acc_stderr\": 0.02850145286039656,\n \"acc_norm\": 0.6727941176470589,\n \"acc_norm_stderr\": 0.02850145286039656\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6683006535947712,\n \"acc_stderr\": 0.019047485239360378,\n \"acc_norm\": 0.6683006535947712,\n \"acc_norm_stderr\": 0.019047485239360378\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6909090909090909,\n \"acc_stderr\": 0.044262946482000985,\n \"acc_norm\": 0.6909090909090909,\n \"acc_norm_stderr\": 0.044262946482000985\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.7061224489795919,\n \"acc_stderr\": 0.02916273841024977,\n \"acc_norm\": 0.7061224489795919,\n \"acc_norm_stderr\": 0.02916273841024977\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8557213930348259,\n \"acc_stderr\": 0.024845753212306046,\n \"acc_norm\": 0.8557213930348259,\n \"acc_norm_stderr\": 0.024845753212306046\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.89,\n \"acc_stderr\": 0.03144660377352203,\n \"acc_norm\": 0.89,\n \"acc_norm_stderr\": 0.03144660377352203\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5301204819277109,\n \"acc_stderr\": 0.03885425420866767,\n \"acc_norm\": 0.5301204819277109,\n \"acc_norm_stderr\": 0.03885425420866767\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8362573099415205,\n \"acc_stderr\": 0.028380919596145866,\n \"acc_norm\": 0.8362573099415205,\n \"acc_norm_stderr\": 0.028380919596145866\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.5104039167686658,\n \"mc1_stderr\": 0.017499711430249268,\n \"mc2\": 0.6767350158422528,\n \"mc2_stderr\": 0.015433642831645542\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7884767166535123,\n \"acc_stderr\": 0.01147774768422318\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.43442001516300227,\n \"acc_stderr\": 0.013653507211411406\n }\n}\n```", "repo_url": "https://huggingface.co/mncai/agiin-11.1B-v0.0", "leaderboard_url": 
"https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_12_16T15_20_44.774696", "path": ["**/details_harness|arc:challenge|25_2023-12-16T15-20-44.774696.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-12-16T15-20-44.774696.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_12_16T15_20_44.774696", "path": ["**/details_harness|gsm8k|5_2023-12-16T15-20-44.774696.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-12-16T15-20-44.774696.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_12_16T15_20_44.774696", "path": ["**/details_harness|hellaswag|10_2023-12-16T15-20-44.774696.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-12-16T15-20-44.774696.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_12_16T15_20_44.774696", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T15-20-44.774696.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-12-16T15-20-44.774696.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-12-16T15-20-44.774696.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T15-20-44.774696.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T15-20-44.774696.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-12-16T15-20-44.774696.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T15-20-44.774696.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T15-20-44.774696.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T15-20-44.774696.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T15-20-44.774696.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-12-16T15-20-44.774696.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-12-16T15-20-44.774696.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T15-20-44.774696.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-12-16T15-20-44.774696.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T15-20-44.774696.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T15-20-44.774696.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T15-20-44.774696.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-12-16T15-20-44.774696.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T15-20-44.774696.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T15-20-44.774696.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T15-20-44.774696.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T15-20-44.774696.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T15-20-44.774696.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T15-20-44.774696.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T15-20-44.774696.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T15-20-44.774696.parquet", 
"**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T15-20-44.774696.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T15-20-44.774696.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T15-20-44.774696.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T15-20-44.774696.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T15-20-44.774696.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T15-20-44.774696.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-12-16T15-20-44.774696.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T15-20-44.774696.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-12-16T15-20-44.774696.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T15-20-44.774696.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T15-20-44.774696.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T15-20-44.774696.parquet", "**/details_harness|hendrycksTest-management|5_2023-12-16T15-20-44.774696.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-12-16T15-20-44.774696.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T15-20-44.774696.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T15-20-44.774696.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T15-20-44.774696.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T15-20-44.774696.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-12-16T15-20-44.774696.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-12-16T15-20-44.774696.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-12-16T15-20-44.774696.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T15-20-44.774696.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-12-16T15-20-44.774696.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T15-20-44.774696.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T15-20-44.774696.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-12-16T15-20-44.774696.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-12-16T15-20-44.774696.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-12-16T15-20-44.774696.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T15-20-44.774696.parquet", "**/details_harness|hendrycksTest-virology|5_2023-12-16T15-20-44.774696.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-12-16T15-20-44.774696.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T15-20-44.774696.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-12-16T15-20-44.774696.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-12-16T15-20-44.774696.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T15-20-44.774696.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T15-20-44.774696.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-12-16T15-20-44.774696.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T15-20-44.774696.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T15-20-44.774696.parquet", 
"**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T15-20-44.774696.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T15-20-44.774696.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-12-16T15-20-44.774696.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-12-16T15-20-44.774696.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T15-20-44.774696.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-12-16T15-20-44.774696.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T15-20-44.774696.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T15-20-44.774696.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T15-20-44.774696.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-12-16T15-20-44.774696.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T15-20-44.774696.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T15-20-44.774696.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T15-20-44.774696.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T15-20-44.774696.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T15-20-44.774696.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T15-20-44.774696.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T15-20-44.774696.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T15-20-44.774696.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T15-20-44.774696.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T15-20-44.774696.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T15-20-44.774696.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T15-20-44.774696.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T15-20-44.774696.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T15-20-44.774696.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-12-16T15-20-44.774696.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T15-20-44.774696.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-12-16T15-20-44.774696.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T15-20-44.774696.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T15-20-44.774696.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T15-20-44.774696.parquet", "**/details_harness|hendrycksTest-management|5_2023-12-16T15-20-44.774696.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-12-16T15-20-44.774696.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T15-20-44.774696.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T15-20-44.774696.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T15-20-44.774696.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T15-20-44.774696.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-12-16T15-20-44.774696.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-12-16T15-20-44.774696.parquet", 
"**/details_harness|hendrycksTest-prehistory|5_2023-12-16T15-20-44.774696.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T15-20-44.774696.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-12-16T15-20-44.774696.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T15-20-44.774696.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T15-20-44.774696.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-12-16T15-20-44.774696.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-12-16T15-20-44.774696.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-12-16T15-20-44.774696.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T15-20-44.774696.parquet", "**/details_harness|hendrycksTest-virology|5_2023-12-16T15-20-44.774696.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-12-16T15-20-44.774696.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_12_16T15_20_44.774696", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T15-20-44.774696.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T15-20-44.774696.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_12_16T15_20_44.774696", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-12-16T15-20-44.774696.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-12-16T15-20-44.774696.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_12_16T15_20_44.774696", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-12-16T15-20-44.774696.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-12-16T15-20-44.774696.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_12_16T15_20_44.774696", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T15-20-44.774696.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T15-20-44.774696.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_12_16T15_20_44.774696", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T15-20-44.774696.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T15-20-44.774696.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_12_16T15_20_44.774696", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-12-16T15-20-44.774696.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-12-16T15-20-44.774696.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_12_16T15_20_44.774696", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T15-20-44.774696.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T15-20-44.774696.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_12_16T15_20_44.774696", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T15-20-44.774696.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T15-20-44.774696.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_12_16T15_20_44.774696", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T15-20-44.774696.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T15-20-44.774696.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_12_16T15_20_44.774696", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T15-20-44.774696.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T15-20-44.774696.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_12_16T15_20_44.774696", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-12-16T15-20-44.774696.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-12-16T15-20-44.774696.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_12_16T15_20_44.774696", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-12-16T15-20-44.774696.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-12-16T15-20-44.774696.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_12_16T15_20_44.774696", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T15-20-44.774696.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T15-20-44.774696.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_12_16T15_20_44.774696", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-12-16T15-20-44.774696.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-12-16T15-20-44.774696.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_12_16T15_20_44.774696", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T15-20-44.774696.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T15-20-44.774696.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_12_16T15_20_44.774696", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T15-20-44.774696.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T15-20-44.774696.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_12_16T15_20_44.774696", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T15-20-44.774696.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T15-20-44.774696.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_12_16T15_20_44.774696", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-12-16T15-20-44.774696.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-12-16T15-20-44.774696.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_12_16T15_20_44.774696", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T15-20-44.774696.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T15-20-44.774696.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_12_16T15_20_44.774696", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T15-20-44.774696.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T15-20-44.774696.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_12_16T15_20_44.774696", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T15-20-44.774696.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T15-20-44.774696.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_12_16T15_20_44.774696", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T15-20-44.774696.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T15-20-44.774696.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_12_16T15_20_44.774696", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T15-20-44.774696.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T15-20-44.774696.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_12_16T15_20_44.774696", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T15-20-44.774696.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T15-20-44.774696.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_12_16T15_20_44.774696", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T15-20-44.774696.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T15-20-44.774696.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_12_16T15_20_44.774696", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T15-20-44.774696.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T15-20-44.774696.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_12_16T15_20_44.774696", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T15-20-44.774696.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T15-20-44.774696.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_12_16T15_20_44.774696", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T15-20-44.774696.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T15-20-44.774696.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_12_16T15_20_44.774696", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T15-20-44.774696.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T15-20-44.774696.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_12_16T15_20_44.774696", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T15-20-44.774696.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T15-20-44.774696.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_12_16T15_20_44.774696", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T15-20-44.774696.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T15-20-44.774696.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_12_16T15_20_44.774696", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T15-20-44.774696.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T15-20-44.774696.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_12_16T15_20_44.774696", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-12-16T15-20-44.774696.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-12-16T15-20-44.774696.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_12_16T15_20_44.774696", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T15-20-44.774696.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T15-20-44.774696.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_12_16T15_20_44.774696", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-12-16T15-20-44.774696.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-12-16T15-20-44.774696.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_12_16T15_20_44.774696", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T15-20-44.774696.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T15-20-44.774696.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_12_16T15_20_44.774696", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T15-20-44.774696.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T15-20-44.774696.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_12_16T15_20_44.774696", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T15-20-44.774696.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T15-20-44.774696.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_12_16T15_20_44.774696", "path": ["**/details_harness|hendrycksTest-management|5_2023-12-16T15-20-44.774696.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-12-16T15-20-44.774696.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_12_16T15_20_44.774696", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-12-16T15-20-44.774696.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-12-16T15-20-44.774696.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_12_16T15_20_44.774696", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T15-20-44.774696.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T15-20-44.774696.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_12_16T15_20_44.774696", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T15-20-44.774696.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T15-20-44.774696.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_12_16T15_20_44.774696", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T15-20-44.774696.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T15-20-44.774696.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_12_16T15_20_44.774696", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T15-20-44.774696.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T15-20-44.774696.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_12_16T15_20_44.774696", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-12-16T15-20-44.774696.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-12-16T15-20-44.774696.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_12_16T15_20_44.774696", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-12-16T15-20-44.774696.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-12-16T15-20-44.774696.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_12_16T15_20_44.774696", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-12-16T15-20-44.774696.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-12-16T15-20-44.774696.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_12_16T15_20_44.774696", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T15-20-44.774696.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T15-20-44.774696.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_12_16T15_20_44.774696", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-12-16T15-20-44.774696.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2023-12-16T15-20-44.774696.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_12_16T15_20_44.774696", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T15-20-44.774696.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T15-20-44.774696.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_12_16T15_20_44.774696", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T15-20-44.774696.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T15-20-44.774696.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_12_16T15_20_44.774696", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-12-16T15-20-44.774696.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-12-16T15-20-44.774696.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_12_16T15_20_44.774696", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-12-16T15-20-44.774696.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-12-16T15-20-44.774696.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_12_16T15_20_44.774696", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-12-16T15-20-44.774696.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-12-16T15-20-44.774696.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_12_16T15_20_44.774696", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T15-20-44.774696.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T15-20-44.774696.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_12_16T15_20_44.774696", "path": ["**/details_harness|hendrycksTest-virology|5_2023-12-16T15-20-44.774696.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-12-16T15-20-44.774696.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_12_16T15_20_44.774696", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-12-16T15-20-44.774696.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-12-16T15-20-44.774696.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_12_16T15_20_44.774696", "path": ["**/details_harness|truthfulqa:mc|0_2023-12-16T15-20-44.774696.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-12-16T15-20-44.774696.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_12_16T15_20_44.774696", "path": ["**/details_harness|winogrande|5_2023-12-16T15-20-44.774696.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-12-16T15-20-44.774696.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_12_16T15_20_44.774696", "path": ["results_2023-12-16T15-20-44.774696.parquet"]}, {"split": "latest", "path": 
["results_2023-12-16T15-20-44.774696.parquet"]}]}]}
2023-12-16T15:24:20+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of mncai/agiin-11.1B-v0.0 Dataset automatically created during the evaluation run of model mncai/agiin-11.1B-v0.0 on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-12-16T15:20:44.774696 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
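The loading snippet that the condensed card above refers to ("To load the details from a run, you can for instance do the following:") was dropped in this flattened copy. A minimal sketch, assuming the details repository for this model follows the leaderboard's usual `details_<org>__<model>` naming, would be:

```python
from datasets import load_dataset

# Repo id below is an assumption, inferred from the leaderboard's naming
# convention for details datasets seen elsewhere in this dump.
data = load_dataset(
    "open-llm-leaderboard/details_mncai__agiin-11.1B-v0.0",
    "harness_winogrande_5",
    split="train",
)
```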
[ "# Dataset Card for Evaluation run of mncai/agiin-11.1B-v0.0\n\n\n\nDataset automatically created during the evaluation run of model mncai/agiin-11.1B-v0.0 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-12-16T15:20:44.774696(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of mncai/agiin-11.1B-v0.0\n\n\n\nDataset automatically created during the evaluation run of model mncai/agiin-11.1B-v0.0 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-12-16T15:20:44.774696(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 187, 66, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of mncai/agiin-11.1B-v0.0\n\n\n\nDataset automatically created during the evaluation run of model mncai/agiin-11.1B-v0.0 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-12-16T15:20:44.774696(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]## Dataset Card Authors [optional]## Dataset Card Contact" ]
e1e4e8cf79e814729835218bd8975b503f44968c
# Dataset Card for Evaluation run of rishiraj/meow <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [rishiraj/meow](https://huggingface.co/rishiraj/meow) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_rishiraj__meow", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-12-16T15:20:46.406514](https://huggingface.co/datasets/open-llm-leaderboard/details_rishiraj__meow/blob/main/results_2023-12-16T15-20-46.406514.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.6659075802656755, "acc_stderr": 0.03161178751169912, "acc_norm": 0.6668721921879325, "acc_norm_stderr": 0.03225461857791198, "mc1": 0.5593635250917993, "mc1_stderr": 0.01737969755543745, "mc2": 0.7048535292691931, "mc2_stderr": 0.015073637165798839 }, "harness|arc:challenge|25": { "acc": 0.6732081911262798, "acc_stderr": 0.013706665975587335, "acc_norm": 0.7047781569965871, "acc_norm_stderr": 0.013329750293382321 }, "harness|hellaswag|10": { "acc": 0.7040430193188608, "acc_stderr": 0.004555388371756656, "acc_norm": 0.8808006373232424, "acc_norm_stderr": 0.0032336074238899864 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.42, "acc_stderr": 0.049604496374885836, "acc_norm": 0.42, "acc_norm_stderr": 0.049604496374885836 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.6074074074074074, "acc_stderr": 0.04218506215368879, "acc_norm": 0.6074074074074074, "acc_norm_stderr": 0.04218506215368879 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.7302631578947368, "acc_stderr": 0.03611780560284898, "acc_norm": 0.7302631578947368, "acc_norm_stderr": 0.03611780560284898 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.75, "acc_stderr": 0.04351941398892446, "acc_norm": 0.75, "acc_norm_stderr": 0.04351941398892446 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.690566037735849, "acc_stderr": 0.02845015479411864, "acc_norm": 0.690566037735849, "acc_norm_stderr": 0.02845015479411864 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.7777777777777778, "acc_stderr": 0.03476590104304134, "acc_norm": 0.7777777777777778, "acc_norm_stderr": 0.03476590104304134 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.45, "acc_stderr": 0.05, "acc_norm": 0.45, "acc_norm_stderr": 0.05 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.53, "acc_stderr": 0.05016135580465919, "acc_norm": 0.53, "acc_norm_stderr": 0.05016135580465919 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 
0.04648231987117316 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.6589595375722543, "acc_stderr": 0.03614665424180826, "acc_norm": 0.6589595375722543, "acc_norm_stderr": 0.03614665424180826 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.38235294117647056, "acc_stderr": 0.04835503696107223, "acc_norm": 0.38235294117647056, "acc_norm_stderr": 0.04835503696107223 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.77, "acc_stderr": 0.04229525846816507, "acc_norm": 0.77, "acc_norm_stderr": 0.04229525846816507 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.6297872340425532, "acc_stderr": 0.03156564682236785, "acc_norm": 0.6297872340425532, "acc_norm_stderr": 0.03156564682236785 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.4824561403508772, "acc_stderr": 0.04700708033551038, "acc_norm": 0.4824561403508772, "acc_norm_stderr": 0.04700708033551038 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.6206896551724138, "acc_stderr": 0.04043461861916747, "acc_norm": 0.6206896551724138, "acc_norm_stderr": 0.04043461861916747 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.47619047619047616, "acc_stderr": 0.025722097064388535, "acc_norm": 0.47619047619047616, "acc_norm_stderr": 0.025722097064388535 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.4444444444444444, "acc_stderr": 0.044444444444444495, "acc_norm": 0.4444444444444444, "acc_norm_stderr": 0.044444444444444495 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.36, "acc_stderr": 0.048241815132442176, "acc_norm": 0.36, "acc_norm_stderr": 0.048241815132442176 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.8096774193548387, "acc_stderr": 0.022331707611823074, "acc_norm": 0.8096774193548387, "acc_norm_stderr": 0.022331707611823074 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.5024630541871922, "acc_stderr": 0.03517945038691063, "acc_norm": 0.5024630541871922, "acc_norm_stderr": 0.03517945038691063 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.72, "acc_stderr": 0.04512608598542128, "acc_norm": 0.72, "acc_norm_stderr": 0.04512608598542128 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.8121212121212121, "acc_stderr": 0.03050193405942914, "acc_norm": 0.8121212121212121, "acc_norm_stderr": 0.03050193405942914 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.8737373737373737, "acc_stderr": 0.02366435940288023, "acc_norm": 0.8737373737373737, "acc_norm_stderr": 0.02366435940288023 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.9067357512953368, "acc_stderr": 0.02098685459328973, "acc_norm": 0.9067357512953368, "acc_norm_stderr": 0.02098685459328973 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.6564102564102564, "acc_stderr": 0.02407869658063547, "acc_norm": 0.6564102564102564, "acc_norm_stderr": 0.02407869658063547 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.36666666666666664, "acc_stderr": 0.029381620726465073, "acc_norm": 0.36666666666666664, "acc_norm_stderr": 0.029381620726465073 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.7184873949579832, "acc_stderr": 0.029213549414372174, "acc_norm": 0.7184873949579832, "acc_norm_stderr": 0.029213549414372174 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.3708609271523179, "acc_stderr": 0.03943966699183629, "acc_norm": 0.3708609271523179, "acc_norm_stderr": 0.03943966699183629 }, "harness|hendrycksTest-high_school_psychology|5": { 
"acc": 0.8477064220183487, "acc_stderr": 0.015405084393157074, "acc_norm": 0.8477064220183487, "acc_norm_stderr": 0.015405084393157074 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.5601851851851852, "acc_stderr": 0.0338517797604481, "acc_norm": 0.5601851851851852, "acc_norm_stderr": 0.0338517797604481 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.8382352941176471, "acc_stderr": 0.025845017986926917, "acc_norm": 0.8382352941176471, "acc_norm_stderr": 0.025845017986926917 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.8565400843881856, "acc_stderr": 0.022818291821017012, "acc_norm": 0.8565400843881856, "acc_norm_stderr": 0.022818291821017012 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.6860986547085202, "acc_stderr": 0.03114679648297246, "acc_norm": 0.6860986547085202, "acc_norm_stderr": 0.03114679648297246 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.7404580152671756, "acc_stderr": 0.03844876139785271, "acc_norm": 0.7404580152671756, "acc_norm_stderr": 0.03844876139785271 }, "harness|hendrycksTest-international_law|5": { "acc": 0.7933884297520661, "acc_stderr": 0.03695980128098824, "acc_norm": 0.7933884297520661, "acc_norm_stderr": 0.03695980128098824 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.7870370370370371, "acc_stderr": 0.0395783547198098, "acc_norm": 0.7870370370370371, "acc_norm_stderr": 0.0395783547198098 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.7484662576687117, "acc_stderr": 0.03408997886857529, "acc_norm": 0.7484662576687117, "acc_norm_stderr": 0.03408997886857529 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.4642857142857143, "acc_stderr": 0.04733667890053756, "acc_norm": 0.4642857142857143, "acc_norm_stderr": 0.04733667890053756 }, "harness|hendrycksTest-management|5": { "acc": 0.8349514563106796, "acc_stderr": 0.03675668832233188, "acc_norm": 0.8349514563106796, "acc_norm_stderr": 0.03675668832233188 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8589743589743589, "acc_stderr": 0.022801382534597528, "acc_norm": 0.8589743589743589, "acc_norm_stderr": 0.022801382534597528 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.69, "acc_stderr": 0.04648231987117316, "acc_norm": 0.69, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.8071519795657727, "acc_stderr": 0.014108533515757431, "acc_norm": 0.8071519795657727, "acc_norm_stderr": 0.014108533515757431 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.7630057803468208, "acc_stderr": 0.02289408248992599, "acc_norm": 0.7630057803468208, "acc_norm_stderr": 0.02289408248992599 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.3743016759776536, "acc_stderr": 0.01618544417945717, "acc_norm": 0.3743016759776536, "acc_norm_stderr": 0.01618544417945717 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.7679738562091504, "acc_stderr": 0.024170840879340863, "acc_norm": 0.7679738562091504, "acc_norm_stderr": 0.024170840879340863 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.7331189710610932, "acc_stderr": 0.025122637608816643, "acc_norm": 0.7331189710610932, "acc_norm_stderr": 0.025122637608816643 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.7870370370370371, "acc_stderr": 0.022779719088733396, "acc_norm": 0.7870370370370371, "acc_norm_stderr": 0.022779719088733396 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.5070921985815603, "acc_stderr": 0.02982449855912901, "acc_norm": 0.5070921985815603, "acc_norm_stderr": 
0.02982449855912901 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.49022164276401564, "acc_stderr": 0.012767793787729338, "acc_norm": 0.49022164276401564, "acc_norm_stderr": 0.012767793787729338 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.7389705882352942, "acc_stderr": 0.026679252270103128, "acc_norm": 0.7389705882352942, "acc_norm_stderr": 0.026679252270103128 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.696078431372549, "acc_stderr": 0.018607552131279827, "acc_norm": 0.696078431372549, "acc_norm_stderr": 0.018607552131279827 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.7, "acc_stderr": 0.04389311454644287, "acc_norm": 0.7, "acc_norm_stderr": 0.04389311454644287 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.746938775510204, "acc_stderr": 0.027833023871399677, "acc_norm": 0.746938775510204, "acc_norm_stderr": 0.027833023871399677 }, "harness|hendrycksTest-sociology|5": { "acc": 0.835820895522388, "acc_stderr": 0.026193923544454125, "acc_norm": 0.835820895522388, "acc_norm_stderr": 0.026193923544454125 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.91, "acc_stderr": 0.028762349126466125, "acc_norm": 0.91, "acc_norm_stderr": 0.028762349126466125 }, "harness|hendrycksTest-virology|5": { "acc": 0.5783132530120482, "acc_stderr": 0.038444531817709175, "acc_norm": 0.5783132530120482, "acc_norm_stderr": 0.038444531817709175 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.7894736842105263, "acc_stderr": 0.03126781714663179, "acc_norm": 0.7894736842105263, "acc_norm_stderr": 0.03126781714663179 }, "harness|truthfulqa:mc|0": { "mc1": 0.5593635250917993, "mc1_stderr": 0.01737969755543745, "mc2": 0.7048535292691931, "mc2_stderr": 0.015073637165798839 }, "harness|winogrande|5": { "acc": 0.8342541436464088, "acc_stderr": 0.010450899545370634 }, "harness|gsm8k|5": { "acc": 0.6489764973464746, "acc_stderr": 0.013146945941397228 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). 
--> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
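The card above points out that the per-run splits are named by timestamp, that the "latest" split always tracks the most recent run, and that the aggregated scores live in the "results" configuration. A minimal sketch of pulling those aggregates, reusing the repo id from the loading example earlier in this card, could look like:

```python
from datasets import load_dataset

# Aggregated metrics for the most recent run; swap "latest" for a timestamped
# split name (e.g. "2023_12_16T15_20_46.406514") to pin a specific run.
results = load_dataset(
    "open-llm-leaderboard/details_rishiraj__meow",
    "results",
    split="latest",
)
print(results[0])
```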
open-llm-leaderboard/details_rishiraj__meow
[ "region:us" ]
2023-12-16T15:23:41+00:00
{"pretty_name": "Evaluation run of rishiraj/meow", "dataset_summary": "Dataset automatically created during the evaluation run of model [rishiraj/meow](https://huggingface.co/rishiraj/meow) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_rishiraj__meow\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-12-16T15:20:46.406514](https://huggingface.co/datasets/open-llm-leaderboard/details_rishiraj__meow/blob/main/results_2023-12-16T15-20-46.406514.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6659075802656755,\n \"acc_stderr\": 0.03161178751169912,\n \"acc_norm\": 0.6668721921879325,\n \"acc_norm_stderr\": 0.03225461857791198,\n \"mc1\": 0.5593635250917993,\n \"mc1_stderr\": 0.01737969755543745,\n \"mc2\": 0.7048535292691931,\n \"mc2_stderr\": 0.015073637165798839\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.6732081911262798,\n \"acc_stderr\": 0.013706665975587335,\n \"acc_norm\": 0.7047781569965871,\n \"acc_norm_stderr\": 0.013329750293382321\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.7040430193188608,\n \"acc_stderr\": 0.004555388371756656,\n \"acc_norm\": 0.8808006373232424,\n \"acc_norm_stderr\": 0.0032336074238899864\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.42,\n \"acc_stderr\": 0.049604496374885836,\n \"acc_norm\": 0.42,\n \"acc_norm_stderr\": 0.049604496374885836\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6074074074074074,\n \"acc_stderr\": 0.04218506215368879,\n \"acc_norm\": 0.6074074074074074,\n \"acc_norm_stderr\": 0.04218506215368879\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.7302631578947368,\n \"acc_stderr\": 0.03611780560284898,\n \"acc_norm\": 0.7302631578947368,\n \"acc_norm_stderr\": 0.03611780560284898\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.75,\n \"acc_stderr\": 0.04351941398892446,\n \"acc_norm\": 0.75,\n \"acc_norm_stderr\": 0.04351941398892446\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.690566037735849,\n \"acc_stderr\": 0.02845015479411864,\n \"acc_norm\": 0.690566037735849,\n \"acc_norm_stderr\": 0.02845015479411864\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7777777777777778,\n \"acc_stderr\": 0.03476590104304134,\n \"acc_norm\": 0.7777777777777778,\n \"acc_norm_stderr\": 0.03476590104304134\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.45,\n \"acc_stderr\": 0.05,\n \"acc_norm\": 0.45,\n \"acc_norm_stderr\": 0.05\n },\n 
\"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.53,\n \"acc_stderr\": 0.05016135580465919,\n \"acc_norm\": 0.53,\n \"acc_norm_stderr\": 0.05016135580465919\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.31,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.31,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6589595375722543,\n \"acc_stderr\": 0.03614665424180826,\n \"acc_norm\": 0.6589595375722543,\n \"acc_norm_stderr\": 0.03614665424180826\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.38235294117647056,\n \"acc_stderr\": 0.04835503696107223,\n \"acc_norm\": 0.38235294117647056,\n \"acc_norm_stderr\": 0.04835503696107223\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.77,\n \"acc_stderr\": 0.04229525846816507,\n \"acc_norm\": 0.77,\n \"acc_norm_stderr\": 0.04229525846816507\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.6297872340425532,\n \"acc_stderr\": 0.03156564682236785,\n \"acc_norm\": 0.6297872340425532,\n \"acc_norm_stderr\": 0.03156564682236785\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.4824561403508772,\n \"acc_stderr\": 0.04700708033551038,\n \"acc_norm\": 0.4824561403508772,\n \"acc_norm_stderr\": 0.04700708033551038\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.6206896551724138,\n \"acc_stderr\": 0.04043461861916747,\n \"acc_norm\": 0.6206896551724138,\n \"acc_norm_stderr\": 0.04043461861916747\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.47619047619047616,\n \"acc_stderr\": 0.025722097064388535,\n \"acc_norm\": 0.47619047619047616,\n \"acc_norm_stderr\": 0.025722097064388535\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.4444444444444444,\n \"acc_stderr\": 0.044444444444444495,\n \"acc_norm\": 0.4444444444444444,\n \"acc_norm_stderr\": 0.044444444444444495\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.36,\n \"acc_stderr\": 0.048241815132442176,\n \"acc_norm\": 0.36,\n \"acc_norm_stderr\": 0.048241815132442176\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.8096774193548387,\n \"acc_stderr\": 0.022331707611823074,\n \"acc_norm\": 0.8096774193548387,\n \"acc_norm_stderr\": 0.022331707611823074\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.5024630541871922,\n \"acc_stderr\": 0.03517945038691063,\n \"acc_norm\": 0.5024630541871922,\n \"acc_norm_stderr\": 0.03517945038691063\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.72,\n \"acc_stderr\": 0.04512608598542128,\n \"acc_norm\": 0.72,\n \"acc_norm_stderr\": 0.04512608598542128\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.8121212121212121,\n \"acc_stderr\": 0.03050193405942914,\n \"acc_norm\": 0.8121212121212121,\n \"acc_norm_stderr\": 0.03050193405942914\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.8737373737373737,\n \"acc_stderr\": 0.02366435940288023,\n \"acc_norm\": 0.8737373737373737,\n \"acc_norm_stderr\": 0.02366435940288023\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.9067357512953368,\n \"acc_stderr\": 0.02098685459328973,\n \"acc_norm\": 0.9067357512953368,\n \"acc_norm_stderr\": 0.02098685459328973\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.6564102564102564,\n \"acc_stderr\": 0.02407869658063547,\n \"acc_norm\": 
0.6564102564102564,\n \"acc_norm_stderr\": 0.02407869658063547\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.36666666666666664,\n \"acc_stderr\": 0.029381620726465073,\n \"acc_norm\": 0.36666666666666664,\n \"acc_norm_stderr\": 0.029381620726465073\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.7184873949579832,\n \"acc_stderr\": 0.029213549414372174,\n \"acc_norm\": 0.7184873949579832,\n \"acc_norm_stderr\": 0.029213549414372174\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.3708609271523179,\n \"acc_stderr\": 0.03943966699183629,\n \"acc_norm\": 0.3708609271523179,\n \"acc_norm_stderr\": 0.03943966699183629\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8477064220183487,\n \"acc_stderr\": 0.015405084393157074,\n \"acc_norm\": 0.8477064220183487,\n \"acc_norm_stderr\": 0.015405084393157074\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.5601851851851852,\n \"acc_stderr\": 0.0338517797604481,\n \"acc_norm\": 0.5601851851851852,\n \"acc_norm_stderr\": 0.0338517797604481\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.8382352941176471,\n \"acc_stderr\": 0.025845017986926917,\n \"acc_norm\": 0.8382352941176471,\n \"acc_norm_stderr\": 0.025845017986926917\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.8565400843881856,\n \"acc_stderr\": 0.022818291821017012,\n \"acc_norm\": 0.8565400843881856,\n \"acc_norm_stderr\": 0.022818291821017012\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6860986547085202,\n \"acc_stderr\": 0.03114679648297246,\n \"acc_norm\": 0.6860986547085202,\n \"acc_norm_stderr\": 0.03114679648297246\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.7404580152671756,\n \"acc_stderr\": 0.03844876139785271,\n \"acc_norm\": 0.7404580152671756,\n \"acc_norm_stderr\": 0.03844876139785271\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.7933884297520661,\n \"acc_stderr\": 0.03695980128098824,\n \"acc_norm\": 0.7933884297520661,\n \"acc_norm_stderr\": 0.03695980128098824\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7870370370370371,\n \"acc_stderr\": 0.0395783547198098,\n \"acc_norm\": 0.7870370370370371,\n \"acc_norm_stderr\": 0.0395783547198098\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7484662576687117,\n \"acc_stderr\": 0.03408997886857529,\n \"acc_norm\": 0.7484662576687117,\n \"acc_norm_stderr\": 0.03408997886857529\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.4642857142857143,\n \"acc_stderr\": 0.04733667890053756,\n \"acc_norm\": 0.4642857142857143,\n \"acc_norm_stderr\": 0.04733667890053756\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.8349514563106796,\n \"acc_stderr\": 0.03675668832233188,\n \"acc_norm\": 0.8349514563106796,\n \"acc_norm_stderr\": 0.03675668832233188\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8589743589743589,\n \"acc_stderr\": 0.022801382534597528,\n \"acc_norm\": 0.8589743589743589,\n \"acc_norm_stderr\": 0.022801382534597528\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.69,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.69,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.8071519795657727,\n \"acc_stderr\": 0.014108533515757431,\n \"acc_norm\": 0.8071519795657727,\n \"acc_norm_stderr\": 0.014108533515757431\n },\n 
\"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.7630057803468208,\n \"acc_stderr\": 0.02289408248992599,\n \"acc_norm\": 0.7630057803468208,\n \"acc_norm_stderr\": 0.02289408248992599\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.3743016759776536,\n \"acc_stderr\": 0.01618544417945717,\n \"acc_norm\": 0.3743016759776536,\n \"acc_norm_stderr\": 0.01618544417945717\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.7679738562091504,\n \"acc_stderr\": 0.024170840879340863,\n \"acc_norm\": 0.7679738562091504,\n \"acc_norm_stderr\": 0.024170840879340863\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.7331189710610932,\n \"acc_stderr\": 0.025122637608816643,\n \"acc_norm\": 0.7331189710610932,\n \"acc_norm_stderr\": 0.025122637608816643\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.7870370370370371,\n \"acc_stderr\": 0.022779719088733396,\n \"acc_norm\": 0.7870370370370371,\n \"acc_norm_stderr\": 0.022779719088733396\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.5070921985815603,\n \"acc_stderr\": 0.02982449855912901,\n \"acc_norm\": 0.5070921985815603,\n \"acc_norm_stderr\": 0.02982449855912901\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.49022164276401564,\n \"acc_stderr\": 0.012767793787729338,\n \"acc_norm\": 0.49022164276401564,\n \"acc_norm_stderr\": 0.012767793787729338\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.7389705882352942,\n \"acc_stderr\": 0.026679252270103128,\n \"acc_norm\": 0.7389705882352942,\n \"acc_norm_stderr\": 0.026679252270103128\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.696078431372549,\n \"acc_stderr\": 0.018607552131279827,\n \"acc_norm\": 0.696078431372549,\n \"acc_norm_stderr\": 0.018607552131279827\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.7,\n \"acc_stderr\": 0.04389311454644287,\n \"acc_norm\": 0.7,\n \"acc_norm_stderr\": 0.04389311454644287\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.746938775510204,\n \"acc_stderr\": 0.027833023871399677,\n \"acc_norm\": 0.746938775510204,\n \"acc_norm_stderr\": 0.027833023871399677\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.835820895522388,\n \"acc_stderr\": 0.026193923544454125,\n \"acc_norm\": 0.835820895522388,\n \"acc_norm_stderr\": 0.026193923544454125\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.91,\n \"acc_stderr\": 0.028762349126466125,\n \"acc_norm\": 0.91,\n \"acc_norm_stderr\": 0.028762349126466125\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5783132530120482,\n \"acc_stderr\": 0.038444531817709175,\n \"acc_norm\": 0.5783132530120482,\n \"acc_norm_stderr\": 0.038444531817709175\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.7894736842105263,\n \"acc_stderr\": 0.03126781714663179,\n \"acc_norm\": 0.7894736842105263,\n \"acc_norm_stderr\": 0.03126781714663179\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.5593635250917993,\n \"mc1_stderr\": 0.01737969755543745,\n \"mc2\": 0.7048535292691931,\n \"mc2_stderr\": 0.015073637165798839\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.8342541436464088,\n \"acc_stderr\": 0.010450899545370634\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.6489764973464746,\n \"acc_stderr\": 0.013146945941397228\n }\n}\n```", "repo_url": "https://huggingface.co/rishiraj/meow", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": 
"[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_12_16T15_20_46.406514", "path": ["**/details_harness|arc:challenge|25_2023-12-16T15-20-46.406514.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-12-16T15-20-46.406514.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_12_16T15_20_46.406514", "path": ["**/details_harness|gsm8k|5_2023-12-16T15-20-46.406514.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-12-16T15-20-46.406514.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_12_16T15_20_46.406514", "path": ["**/details_harness|hellaswag|10_2023-12-16T15-20-46.406514.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-12-16T15-20-46.406514.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_12_16T15_20_46.406514", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T15-20-46.406514.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-12-16T15-20-46.406514.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-12-16T15-20-46.406514.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T15-20-46.406514.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T15-20-46.406514.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-12-16T15-20-46.406514.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T15-20-46.406514.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T15-20-46.406514.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T15-20-46.406514.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T15-20-46.406514.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-12-16T15-20-46.406514.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-12-16T15-20-46.406514.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T15-20-46.406514.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-12-16T15-20-46.406514.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T15-20-46.406514.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T15-20-46.406514.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T15-20-46.406514.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-12-16T15-20-46.406514.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T15-20-46.406514.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T15-20-46.406514.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T15-20-46.406514.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T15-20-46.406514.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T15-20-46.406514.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T15-20-46.406514.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T15-20-46.406514.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T15-20-46.406514.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T15-20-46.406514.parquet", 
"**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T15-20-46.406514.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T15-20-46.406514.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T15-20-46.406514.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T15-20-46.406514.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T15-20-46.406514.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-12-16T15-20-46.406514.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T15-20-46.406514.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-12-16T15-20-46.406514.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T15-20-46.406514.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T15-20-46.406514.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T15-20-46.406514.parquet", "**/details_harness|hendrycksTest-management|5_2023-12-16T15-20-46.406514.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-12-16T15-20-46.406514.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T15-20-46.406514.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T15-20-46.406514.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T15-20-46.406514.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T15-20-46.406514.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-12-16T15-20-46.406514.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-12-16T15-20-46.406514.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-12-16T15-20-46.406514.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T15-20-46.406514.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-12-16T15-20-46.406514.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T15-20-46.406514.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T15-20-46.406514.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-12-16T15-20-46.406514.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-12-16T15-20-46.406514.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-12-16T15-20-46.406514.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T15-20-46.406514.parquet", "**/details_harness|hendrycksTest-virology|5_2023-12-16T15-20-46.406514.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-12-16T15-20-46.406514.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T15-20-46.406514.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-12-16T15-20-46.406514.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-12-16T15-20-46.406514.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T15-20-46.406514.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T15-20-46.406514.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-12-16T15-20-46.406514.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T15-20-46.406514.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T15-20-46.406514.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T15-20-46.406514.parquet", 
"**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T15-20-46.406514.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-12-16T15-20-46.406514.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-12-16T15-20-46.406514.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T15-20-46.406514.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-12-16T15-20-46.406514.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T15-20-46.406514.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T15-20-46.406514.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T15-20-46.406514.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-12-16T15-20-46.406514.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T15-20-46.406514.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T15-20-46.406514.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T15-20-46.406514.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T15-20-46.406514.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T15-20-46.406514.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T15-20-46.406514.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T15-20-46.406514.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T15-20-46.406514.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T15-20-46.406514.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T15-20-46.406514.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T15-20-46.406514.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T15-20-46.406514.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T15-20-46.406514.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T15-20-46.406514.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-12-16T15-20-46.406514.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T15-20-46.406514.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-12-16T15-20-46.406514.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T15-20-46.406514.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T15-20-46.406514.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T15-20-46.406514.parquet", "**/details_harness|hendrycksTest-management|5_2023-12-16T15-20-46.406514.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-12-16T15-20-46.406514.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T15-20-46.406514.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T15-20-46.406514.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T15-20-46.406514.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T15-20-46.406514.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-12-16T15-20-46.406514.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-12-16T15-20-46.406514.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-12-16T15-20-46.406514.parquet", 
"**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T15-20-46.406514.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-12-16T15-20-46.406514.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T15-20-46.406514.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T15-20-46.406514.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-12-16T15-20-46.406514.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-12-16T15-20-46.406514.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-12-16T15-20-46.406514.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T15-20-46.406514.parquet", "**/details_harness|hendrycksTest-virology|5_2023-12-16T15-20-46.406514.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-12-16T15-20-46.406514.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_12_16T15_20_46.406514", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T15-20-46.406514.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T15-20-46.406514.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_12_16T15_20_46.406514", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-12-16T15-20-46.406514.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-12-16T15-20-46.406514.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_12_16T15_20_46.406514", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-12-16T15-20-46.406514.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-12-16T15-20-46.406514.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_12_16T15_20_46.406514", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T15-20-46.406514.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T15-20-46.406514.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_12_16T15_20_46.406514", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T15-20-46.406514.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T15-20-46.406514.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_12_16T15_20_46.406514", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-12-16T15-20-46.406514.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-12-16T15-20-46.406514.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_12_16T15_20_46.406514", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T15-20-46.406514.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T15-20-46.406514.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_12_16T15_20_46.406514", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T15-20-46.406514.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T15-20-46.406514.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_12_16T15_20_46.406514", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T15-20-46.406514.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T15-20-46.406514.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_12_16T15_20_46.406514", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T15-20-46.406514.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T15-20-46.406514.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_12_16T15_20_46.406514", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-12-16T15-20-46.406514.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-12-16T15-20-46.406514.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_12_16T15_20_46.406514", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-12-16T15-20-46.406514.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-12-16T15-20-46.406514.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_12_16T15_20_46.406514", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T15-20-46.406514.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T15-20-46.406514.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_12_16T15_20_46.406514", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-12-16T15-20-46.406514.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-12-16T15-20-46.406514.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_12_16T15_20_46.406514", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T15-20-46.406514.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T15-20-46.406514.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_12_16T15_20_46.406514", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T15-20-46.406514.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T15-20-46.406514.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_12_16T15_20_46.406514", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T15-20-46.406514.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T15-20-46.406514.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_12_16T15_20_46.406514", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-12-16T15-20-46.406514.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-12-16T15-20-46.406514.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_12_16T15_20_46.406514", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T15-20-46.406514.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T15-20-46.406514.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_12_16T15_20_46.406514", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T15-20-46.406514.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T15-20-46.406514.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_12_16T15_20_46.406514", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T15-20-46.406514.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T15-20-46.406514.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_12_16T15_20_46.406514", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T15-20-46.406514.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T15-20-46.406514.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_12_16T15_20_46.406514", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T15-20-46.406514.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T15-20-46.406514.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_12_16T15_20_46.406514", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T15-20-46.406514.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T15-20-46.406514.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_12_16T15_20_46.406514", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T15-20-46.406514.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T15-20-46.406514.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_12_16T15_20_46.406514", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T15-20-46.406514.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T15-20-46.406514.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_12_16T15_20_46.406514", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T15-20-46.406514.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T15-20-46.406514.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_12_16T15_20_46.406514", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T15-20-46.406514.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T15-20-46.406514.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_12_16T15_20_46.406514", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T15-20-46.406514.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T15-20-46.406514.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_12_16T15_20_46.406514", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T15-20-46.406514.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T15-20-46.406514.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_12_16T15_20_46.406514", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T15-20-46.406514.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T15-20-46.406514.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_12_16T15_20_46.406514", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T15-20-46.406514.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T15-20-46.406514.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_12_16T15_20_46.406514", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-12-16T15-20-46.406514.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-12-16T15-20-46.406514.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_12_16T15_20_46.406514", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T15-20-46.406514.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T15-20-46.406514.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_12_16T15_20_46.406514", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-12-16T15-20-46.406514.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-12-16T15-20-46.406514.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_12_16T15_20_46.406514", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T15-20-46.406514.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T15-20-46.406514.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_12_16T15_20_46.406514", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T15-20-46.406514.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T15-20-46.406514.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_12_16T15_20_46.406514", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T15-20-46.406514.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T15-20-46.406514.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_12_16T15_20_46.406514", "path": ["**/details_harness|hendrycksTest-management|5_2023-12-16T15-20-46.406514.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-12-16T15-20-46.406514.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_12_16T15_20_46.406514", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-12-16T15-20-46.406514.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-12-16T15-20-46.406514.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_12_16T15_20_46.406514", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T15-20-46.406514.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T15-20-46.406514.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_12_16T15_20_46.406514", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T15-20-46.406514.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T15-20-46.406514.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_12_16T15_20_46.406514", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T15-20-46.406514.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T15-20-46.406514.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_12_16T15_20_46.406514", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T15-20-46.406514.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T15-20-46.406514.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_12_16T15_20_46.406514", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-12-16T15-20-46.406514.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-12-16T15-20-46.406514.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_12_16T15_20_46.406514", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-12-16T15-20-46.406514.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-12-16T15-20-46.406514.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_12_16T15_20_46.406514", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-12-16T15-20-46.406514.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-12-16T15-20-46.406514.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_12_16T15_20_46.406514", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T15-20-46.406514.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T15-20-46.406514.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_12_16T15_20_46.406514", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-12-16T15-20-46.406514.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2023-12-16T15-20-46.406514.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_12_16T15_20_46.406514", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T15-20-46.406514.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T15-20-46.406514.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_12_16T15_20_46.406514", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T15-20-46.406514.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T15-20-46.406514.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_12_16T15_20_46.406514", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-12-16T15-20-46.406514.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-12-16T15-20-46.406514.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_12_16T15_20_46.406514", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-12-16T15-20-46.406514.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-12-16T15-20-46.406514.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_12_16T15_20_46.406514", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-12-16T15-20-46.406514.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-12-16T15-20-46.406514.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_12_16T15_20_46.406514", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T15-20-46.406514.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T15-20-46.406514.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_12_16T15_20_46.406514", "path": ["**/details_harness|hendrycksTest-virology|5_2023-12-16T15-20-46.406514.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-12-16T15-20-46.406514.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_12_16T15_20_46.406514", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-12-16T15-20-46.406514.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-12-16T15-20-46.406514.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_12_16T15_20_46.406514", "path": ["**/details_harness|truthfulqa:mc|0_2023-12-16T15-20-46.406514.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-12-16T15-20-46.406514.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_12_16T15_20_46.406514", "path": ["**/details_harness|winogrande|5_2023-12-16T15-20-46.406514.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-12-16T15-20-46.406514.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_12_16T15_20_46.406514", "path": ["results_2023-12-16T15-20-46.406514.parquet"]}, {"split": "latest", "path": 
["results_2023-12-16T15-20-46.406514.parquet"]}]}]}
2023-12-16T15:24:23+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of rishiraj/meow Dataset automatically created during the evaluation run of model rishiraj/meow on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-12-16T15:20:46.406514 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
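The card text above says "you can for instance do the following", but the accompanying snippet was stripped in this rendering. A minimal sketch, assuming the repository follows the `details_{org}__{model}` naming convention used for the other evaluation-details datasets in this document (the `harness_winogrande_5` config does appear in the config list above; the repository id itself is an assumption):

```python
from datasets import load_dataset

# Assumed repository id for the rishiraj/meow evaluation details.
data = load_dataset(
    "open-llm-leaderboard/details_rishiraj__meow",
    "harness_winogrande_5",
    split="train",
)
```

As described in the card, the "train" split is expected to point at the latest run.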
[ "# Dataset Card for Evaluation run of rishiraj/meow\n\n\n\nDataset automatically created during the evaluation run of model rishiraj/meow on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-12-16T15:20:46.406514(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of rishiraj/meow\n\n\n\nDataset automatically created during the evaluation run of model rishiraj/meow on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-12-16T15:20:46.406514(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 173, 67, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of rishiraj/meow\n\n\n\nDataset automatically created during the evaluation run of model rishiraj/meow on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-12-16T15:20:46.406514(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]## Dataset Card Authors [optional]## Dataset Card Contact" ]
0b46e33c18952d052d9dbce9985db766e8929eb0
# Tang Poetry Illustration Dataset (唐诗配图数据集) This dataset covers all 80 five-character regulated verses (五言律诗), 54 seven-character regulated verses (七言律诗), 37 five-character quatrains (五言绝句), and 60 seven-character quatrains (七言绝句) from the "Three Hundred Tang Poems" (《唐诗三百首》), 231 poems in total. For each poem, three wide-format images were generated with DALL·E 3, one for each of the following three prompt formats, giving 693 images in total: 1. 请根据{作者}的唐诗作画, 画面中不要有文字: {唐诗正文} ("Please paint a picture based on this Tang poem by {author}; the image must contain no text: {poem text}") 2. {唐诗正文} ("{poem text}") 3. 请根据{作者}的唐诗《{标题}》作画, 画面中不要有文字: {唐诗正文} ("Please paint a picture based on the Tang poem '{title}' by {author}; the image must contain no text: {poem text}") ## Field descriptions `image`: the image file name. All images in this dataset are wide-format images at 1792x1024 resolution, all generated at hd quality. `poem_id`: the poem identifier, in the format {form}_{index}, where {form} is one of wulv (five-character regulated verse), qilv (seven-character regulated verse), wujue (five-character quatrain), or qijue (seven-character quatrain), and {index} is the poem's position within that form, following the same order as in the "Three Hundred Tang Poems". `prompt`: the original prompt sent to DALL·E 3. `revised_prompt`: the painting prompt automatically refined by DALL·E 3 from the original prompt, i.e. the `revised_prompt` field of the DALL·E 3 API response. ## Uses This dataset can be used to provide illustrations for the "Three Hundred Tang Poems", to fine-tune text-to-image models for the task of illustrating lines of poetry, or to fine-tune language models for the task of generating visual descriptions from lines of poetry. ## Limitations A considerable fraction of the images contain text, and that text is generally incorrect: some of the English is spelled correctly, but the Chinese characters are essentially gibberish. Some images do not necessarily match the theme of the poem, so if the dataset is used for tasks with high accuracy requirements (for example, publishing an illustrated edition of the "Three Hundred Tang Poems"), the matches need to be checked carefully. Corrections of errors in the dataset and contributions of additional data are welcome! ## Contact author QQ: 583753622
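A minimal loading sketch for this dataset, based on the `default` config and single `train` split declared in the record's metadata below; the example `poem_id` value in the comments is illustrative only, not taken from the data.

```python
from datasets import load_dataset

# 693 image/prompt pairs in a single "train" split, per the dataset metadata.
ds = load_dataset("hugfaceguy0001/TangshiDalle3Images", split="train")

row = ds[0]
print(row["poem_id"])          # e.g. "wulv_1", i.e. {form}_{index within that form}
print(row["prompt"])           # prompt originally sent to DALL·E 3
print(row["revised_prompt"])   # prompt as rewritten by the DALL·E 3 API
row["image"]                   # PIL image, 1792x1024 wide format
```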
hugfaceguy0001/TangshiDalle3Images
[ "task_categories:text-to-image", "size_categories:n<1K", "language:en", "language:zh", "license:openrail", "art", "culture", "poem", "dalle3", "diffusion", "Chinese", "region:us" ]
2023-12-16T15:24:29+00:00
{"language": ["en", "zh"], "license": "openrail", "size_categories": ["n<1K"], "task_categories": ["text-to-image"], "pretty_name": "\u5510\u8bd7\u914d\u56fe\u6570\u636e\u96c6", "dataset_info": {"features": [{"name": "image", "dtype": "image"}, {"name": "poem_id", "dtype": "string"}, {"name": "prompt", "dtype": "string"}, {"name": "revised_prompt", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 3817427239, "num_examples": 693}], "download_size": 3485749230, "dataset_size": 3817427239}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}], "tags": ["art", "culture", "poem", "dalle3", "diffusion", "Chinese"]}
2023-12-16T16:52:02+00:00
[]
[ "en", "zh" ]
TAGS #task_categories-text-to-image #size_categories-n<1K #language-English #language-Chinese #license-openrail #art #culture #poem #dalle3 #diffusion #Chinese #region-us
# 唐诗配图数据集 使用《唐诗三百首》中全部五言律诗80首、七言律诗54首、五言绝句37首、七言绝句60首,共231首诗。 每首诗分别使用以下三种prompt格式,通过DALL·E 3生成三张宽幅图片,共693张图片: 1. 请根据{作者}的唐诗作画, 画面中不要有文字: {唐诗正文} 2. {唐诗正文} 3. 请根据{作者}的唐诗《{标题}》作画, 画面中不要有文字: {唐诗正文} ## 数据集各字段描述 'image': 图片文件名。本数据集的图片全部是分辨率为1792x1024的宽幅图片,质量全部为hd. 'poem_id': 唐诗序号,格式为{诗体}_{序号},{诗体}可以是wulv(五言律诗), qilv(七言律诗), wujue(五言绝句), qijue(七言绝句),序号即该诗在该诗体中的编号,顺序和《唐诗三百首》相同。 'prompt': 输入给DALL·E 3的原始提示词。 'revised_prompt': DALL·E 3根据原始提示词自动完善的绘画提示词,即DALL·E 3 api返回值的'revised_prompt'字段。 ## 用途 本数据集可以用于为《唐诗三百首》提供插图,也可以用于微调文生图模型以适用于诗句配图任务,也可以用于微调语言模型以适用于由诗句生成可视化的描述词的任务。 ## 局限性 相当一部分图片中含有文字,而文字一般并不正确,英文还有部分拼写正确的,汉字基本上是乱码。部分图片和唐诗的主题不一定匹配,若将本数据集用于有较高精确度需求的任务(例如出版插图版的《唐诗三百首》书籍),则需要严格检查匹配度。 欢迎大家纠正数据集中的错误或贡献更多数据! ## Contact author QQ: 583753622
[ "# 唐诗配图数据集\n使用《唐诗三百首》中全部五言律诗80首、七言律诗54首、五言绝句37首、七言绝句60首,共231首诗。\n\n每首诗分别使用以下三种prompt格式,通过DALL·E 3生成三张宽幅图片,共693张图片:\n\n1. 请根据{作者}的唐诗作画, 画面中不要有文字: {唐诗正文}\n2. {唐诗正文}\n3. 请根据{作者}的唐诗《{标题}》作画, 画面中不要有文字: {唐诗正文}", "## 数据集各字段描述\n'image': 图片文件名。本数据集的图片全部是分辨率为1792x1024的宽幅图片,质量全部为hd.\n\n'poem_id': 唐诗序号,格式为{诗体}_{序号},{诗体}可以是wulv(五言律诗), qilv(七言律诗), wujue(五言绝句), qijue(七言绝句),序号即该诗在该诗体中的编号,顺序和《唐诗三百首》相同。\n\n'prompt': 输入给DALL·E 3的原始提示词。\n\n'revised_prompt': DALL·E 3根据原始提示词自动完善的绘画提示词,即DALL·E 3 api返回值的'revised_prompt'字段。", "## 用途\n本数据集可以用于为《唐诗三百首》提供插图,也可以用于微调文生图模型以适用于诗句配图任务,也可以用于微调语言模型以适用于由诗句生成可视化的描述词的任务。", "## 局限性\n相当一部分图片中含有文字,而文字一般并不正确,英文还有部分拼写正确的,汉字基本上是乱码。部分图片和唐诗的主题不一定匹配,若将本数据集用于有较高精确度需求的任务(例如出版插图版的《唐诗三百首》书籍),则需要严格检查匹配度。\n\n欢迎大家纠正数据集中的错误或贡献更多数据!", "## Contact author\nQQ: 583753622" ]
[ "TAGS\n#task_categories-text-to-image #size_categories-n<1K #language-English #language-Chinese #license-openrail #art #culture #poem #dalle3 #diffusion #Chinese #region-us \n", "# 唐诗配图数据集\n使用《唐诗三百首》中全部五言律诗80首、七言律诗54首、五言绝句37首、七言绝句60首,共231首诗。\n\n每首诗分别使用以下三种prompt格式,通过DALL·E 3生成三张宽幅图片,共693张图片:\n\n1. 请根据{作者}的唐诗作画, 画面中不要有文字: {唐诗正文}\n2. {唐诗正文}\n3. 请根据{作者}的唐诗《{标题}》作画, 画面中不要有文字: {唐诗正文}", "## 数据集各字段描述\n'image': 图片文件名。本数据集的图片全部是分辨率为1792x1024的宽幅图片,质量全部为hd.\n\n'poem_id': 唐诗序号,格式为{诗体}_{序号},{诗体}可以是wulv(五言律诗), qilv(七言律诗), wujue(五言绝句), qijue(七言绝句),序号即该诗在该诗体中的编号,顺序和《唐诗三百首》相同。\n\n'prompt': 输入给DALL·E 3的原始提示词。\n\n'revised_prompt': DALL·E 3根据原始提示词自动完善的绘画提示词,即DALL·E 3 api返回值的'revised_prompt'字段。", "## 用途\n本数据集可以用于为《唐诗三百首》提供插图,也可以用于微调文生图模型以适用于诗句配图任务,也可以用于微调语言模型以适用于由诗句生成可视化的描述词的任务。", "## 局限性\n相当一部分图片中含有文字,而文字一般并不正确,英文还有部分拼写正确的,汉字基本上是乱码。部分图片和唐诗的主题不一定匹配,若将本数据集用于有较高精确度需求的任务(例如出版插图版的《唐诗三百首》书籍),则需要严格检查匹配度。\n\n欢迎大家纠正数据集中的错误或贡献更多数据!", "## Contact author\nQQ: 583753622" ]
[ 61, 150, 195, 56, 93, 10 ]
[ "passage: TAGS\n#task_categories-text-to-image #size_categories-n<1K #language-English #language-Chinese #license-openrail #art #culture #poem #dalle3 #diffusion #Chinese #region-us \n# 唐诗配图数据集\n使用《唐诗三百首》中全部五言律诗80首、七言律诗54首、五言绝句37首、七言绝句60首,共231首诗。\n\n每首诗分别使用以下三种prompt格式,通过DALL·E 3生成三张宽幅图片,共693张图片:\n\n1. 请根据{作者}的唐诗作画, 画面中不要有文字: {唐诗正文}\n2. {唐诗正文}\n3. 请根据{作者}的唐诗《{标题}》作画, 画面中不要有文字: {唐诗正文}## 数据集各字段描述\n'image': 图片文件名。本数据集的图片全部是分辨率为1792x1024的宽幅图片,质量全部为hd.\n\n'poem_id': 唐诗序号,格式为{诗体}_{序号},{诗体}可以是wulv(五言律诗), qilv(七言律诗), wujue(五言绝句), qijue(七言绝句),序号即该诗在该诗体中的编号,顺序和《唐诗三百首》相同。\n\n'prompt': 输入给DALL·E 3的原始提示词。\n\n'revised_prompt': DALL·E 3根据原始提示词自动完善的绘画提示词,即DALL·E 3 api返回值的'revised_prompt'字段。## 用途\n本数据集可以用于为《唐诗三百首》提供插图,也可以用于微调文生图模型以适用于诗句配图任务,也可以用于微调语言模型以适用于由诗句生成可视化的描述词的任务。" ]
716fa8f57e010749a316eae3c0ddd669420959fa
csv-Elemento-ai
AndrewRicc/csv-Elemento-ai
[ "region:us" ]
2023-12-16T15:27:04+00:00
{}
2023-12-16T16:04:42+00:00
[]
[]
TAGS #region-us
csv-Elemento-ai
[]
[ "TAGS\n#region-us \n" ]
[ 6 ]
[ "passage: TAGS\n#region-us \n" ]
a2529ef441290e33bec76c22107c937c5c88be7a
# Dataset Card for Evaluation run of mediocredev/open-llama-3b-v2-instruct <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [mediocredev/open-llama-3b-v2-instruct](https://huggingface.co/mediocredev/open-llama-3b-v2-instruct) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_mediocredev__open-llama-3b-v2-instruct", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-12-16T15:28:20.399841](https://huggingface.co/datasets/open-llm-leaderboard/details_mediocredev__open-llama-3b-v2-instruct/blob/main/results_2023-12-16T15-28-20.399841.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.3958981300034306, "acc_stderr": 0.034198998112262805, "acc_norm": 0.4018856544108267, "acc_norm_stderr": 0.035129135992579406, "mc1": 0.2252141982864137, "mc1_stderr": 0.014623240768023498, "mc2": 0.3795634078796446, "mc2_stderr": 0.014273839655133331 }, "harness|arc:challenge|25": { "acc": 0.35409556313993173, "acc_stderr": 0.01397545412275655, "acc_norm": 0.3848122866894198, "acc_norm_stderr": 0.014218371065251104 }, "harness|hellaswag|10": { "acc": 0.5142401911969727, "acc_stderr": 0.0049877573147698445, "acc_norm": 0.7024497112129058, "acc_norm_stderr": 0.004562462665505218 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.27, "acc_stderr": 0.044619604333847415, "acc_norm": 0.27, "acc_norm_stderr": 0.044619604333847415 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.45185185185185184, "acc_stderr": 0.04299268905480864, "acc_norm": 0.45185185185185184, "acc_norm_stderr": 0.04299268905480864 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.3815789473684211, "acc_stderr": 0.03953173377749194, "acc_norm": 0.3815789473684211, "acc_norm_stderr": 0.03953173377749194 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.41, "acc_stderr": 0.049431107042371025, "acc_norm": 0.41, "acc_norm_stderr": 0.049431107042371025 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.4377358490566038, "acc_stderr": 0.03053333843046751, "acc_norm": 0.4377358490566038, "acc_norm_stderr": 0.03053333843046751 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.3680555555555556, "acc_stderr": 0.04032999053960719, "acc_norm": 0.3680555555555556, "acc_norm_stderr": 0.04032999053960719 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.34, "acc_stderr": 0.04760952285695235, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695235 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.35, "acc_stderr": 0.0479372485441102, "acc_norm": 0.35, "acc_norm_stderr": 
0.0479372485441102 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.29, "acc_stderr": 0.045604802157206845, "acc_norm": 0.29, "acc_norm_stderr": 0.045604802157206845 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.43352601156069365, "acc_stderr": 0.03778621079092055, "acc_norm": 0.43352601156069365, "acc_norm_stderr": 0.03778621079092055 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.19607843137254902, "acc_stderr": 0.03950581861179961, "acc_norm": 0.19607843137254902, "acc_norm_stderr": 0.03950581861179961 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.48, "acc_stderr": 0.050211673156867795, "acc_norm": 0.48, "acc_norm_stderr": 0.050211673156867795 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.3659574468085106, "acc_stderr": 0.0314895582974553, "acc_norm": 0.3659574468085106, "acc_norm_stderr": 0.0314895582974553 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.2894736842105263, "acc_stderr": 0.042663394431593935, "acc_norm": 0.2894736842105263, "acc_norm_stderr": 0.042663394431593935 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.3931034482758621, "acc_stderr": 0.0407032901370707, "acc_norm": 0.3931034482758621, "acc_norm_stderr": 0.0407032901370707 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.2698412698412698, "acc_stderr": 0.022860838309232072, "acc_norm": 0.2698412698412698, "acc_norm_stderr": 0.022860838309232072 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.30158730158730157, "acc_stderr": 0.041049472699033945, "acc_norm": 0.30158730158730157, "acc_norm_stderr": 0.041049472699033945 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.34, "acc_stderr": 0.047609522856952365, "acc_norm": 0.34, "acc_norm_stderr": 0.047609522856952365 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.4161290322580645, "acc_stderr": 0.028040981380761547, "acc_norm": 0.4161290322580645, "acc_norm_stderr": 0.028040981380761547 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.2660098522167488, "acc_stderr": 0.03108982600293753, "acc_norm": 0.2660098522167488, "acc_norm_stderr": 0.03108982600293753 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.37, "acc_stderr": 0.04852365870939098, "acc_norm": 0.37, "acc_norm_stderr": 0.04852365870939098 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.4484848484848485, "acc_stderr": 0.038835659779569286, "acc_norm": 0.4484848484848485, "acc_norm_stderr": 0.038835659779569286 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.5454545454545454, "acc_stderr": 0.03547601494006937, "acc_norm": 0.5454545454545454, "acc_norm_stderr": 0.03547601494006937 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.48186528497409326, "acc_stderr": 0.03606065001832919, "acc_norm": 0.48186528497409326, "acc_norm_stderr": 0.03606065001832919 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.35128205128205126, "acc_stderr": 0.024203665177902796, "acc_norm": 0.35128205128205126, "acc_norm_stderr": 0.024203665177902796 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.25925925925925924, "acc_stderr": 0.026719240783712166, "acc_norm": 0.25925925925925924, "acc_norm_stderr": 0.026719240783712166 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.35714285714285715, "acc_stderr": 0.031124619309328177, "acc_norm": 0.35714285714285715, "acc_norm_stderr": 0.031124619309328177 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 
0.2980132450331126, "acc_stderr": 0.037345356767871984, "acc_norm": 0.2980132450331126, "acc_norm_stderr": 0.037345356767871984 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.4972477064220184, "acc_stderr": 0.02143699835976532, "acc_norm": 0.4972477064220184, "acc_norm_stderr": 0.02143699835976532 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.3055555555555556, "acc_stderr": 0.03141554629402544, "acc_norm": 0.3055555555555556, "acc_norm_stderr": 0.03141554629402544 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.46078431372549017, "acc_stderr": 0.03498501649369527, "acc_norm": 0.46078431372549017, "acc_norm_stderr": 0.03498501649369527 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.5611814345991561, "acc_stderr": 0.032302649315470375, "acc_norm": 0.5611814345991561, "acc_norm_stderr": 0.032302649315470375 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.4977578475336323, "acc_stderr": 0.033557465352232634, "acc_norm": 0.4977578475336323, "acc_norm_stderr": 0.033557465352232634 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.46564885496183206, "acc_stderr": 0.04374928560599738, "acc_norm": 0.46564885496183206, "acc_norm_stderr": 0.04374928560599738 }, "harness|hendrycksTest-international_law|5": { "acc": 0.47107438016528924, "acc_stderr": 0.04556710331269498, "acc_norm": 0.47107438016528924, "acc_norm_stderr": 0.04556710331269498 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.4166666666666667, "acc_stderr": 0.04766075165356461, "acc_norm": 0.4166666666666667, "acc_norm_stderr": 0.04766075165356461 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.4110429447852761, "acc_stderr": 0.038656978537853624, "acc_norm": 0.4110429447852761, "acc_norm_stderr": 0.038656978537853624 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.3125, "acc_stderr": 0.043994650575715215, "acc_norm": 0.3125, "acc_norm_stderr": 0.043994650575715215 }, "harness|hendrycksTest-management|5": { "acc": 0.5533980582524272, "acc_stderr": 0.04922424153458933, "acc_norm": 0.5533980582524272, "acc_norm_stderr": 0.04922424153458933 }, "harness|hendrycksTest-marketing|5": { "acc": 0.5341880341880342, "acc_stderr": 0.03267942734081228, "acc_norm": 0.5341880341880342, "acc_norm_stderr": 0.03267942734081228 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.4, "acc_stderr": 0.049236596391733084, "acc_norm": 0.4, "acc_norm_stderr": 0.049236596391733084 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.5261813537675607, "acc_stderr": 0.01785543455404199, "acc_norm": 0.5261813537675607, "acc_norm_stderr": 0.01785543455404199 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.4277456647398844, "acc_stderr": 0.02663653974111608, "acc_norm": 0.4277456647398844, "acc_norm_stderr": 0.02663653974111608 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.25139664804469275, "acc_stderr": 0.014508979453553984, "acc_norm": 0.25139664804469275, "acc_norm_stderr": 0.014508979453553984 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.42810457516339867, "acc_stderr": 0.028332397483664274, "acc_norm": 0.42810457516339867, "acc_norm_stderr": 0.028332397483664274 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.40192926045016075, "acc_stderr": 0.027846476005930473, "acc_norm": 0.40192926045016075, "acc_norm_stderr": 0.027846476005930473 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.41358024691358025, "acc_stderr": 0.027402042040269952, "acc_norm": 0.41358024691358025, "acc_norm_stderr": 0.027402042040269952 }, 
"harness|hendrycksTest-professional_accounting|5": { "acc": 0.3049645390070922, "acc_stderr": 0.027464708442022128, "acc_norm": 0.3049645390070922, "acc_norm_stderr": 0.027464708442022128 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.30964797913950454, "acc_stderr": 0.01180859826250332, "acc_norm": 0.30964797913950454, "acc_norm_stderr": 0.01180859826250332 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.3786764705882353, "acc_stderr": 0.029465133639776132, "acc_norm": 0.3786764705882353, "acc_norm_stderr": 0.029465133639776132 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.36437908496732024, "acc_stderr": 0.019469518221573702, "acc_norm": 0.36437908496732024, "acc_norm_stderr": 0.019469518221573702 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.5, "acc_stderr": 0.04789131426105757, "acc_norm": 0.5, "acc_norm_stderr": 0.04789131426105757 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.32653061224489793, "acc_stderr": 0.030021056238440286, "acc_norm": 0.32653061224489793, "acc_norm_stderr": 0.030021056238440286 }, "harness|hendrycksTest-sociology|5": { "acc": 0.4527363184079602, "acc_stderr": 0.035197027175769155, "acc_norm": 0.4527363184079602, "acc_norm_stderr": 0.035197027175769155 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.6, "acc_stderr": 0.049236596391733084, "acc_norm": 0.6, "acc_norm_stderr": 0.049236596391733084 }, "harness|hendrycksTest-virology|5": { "acc": 0.39156626506024095, "acc_stderr": 0.03799857454479636, "acc_norm": 0.39156626506024095, "acc_norm_stderr": 0.03799857454479636 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.49707602339181284, "acc_stderr": 0.03834759370936839, "acc_norm": 0.49707602339181284, "acc_norm_stderr": 0.03834759370936839 }, "harness|truthfulqa:mc|0": { "mc1": 0.2252141982864137, "mc1_stderr": 0.014623240768023498, "mc2": 0.3795634078796446, "mc2_stderr": 0.014273839655133331 }, "harness|winogrande|5": { "acc": 0.6574585635359116, "acc_stderr": 0.013337483579075923 }, "harness|gsm8k|5": { "acc": 0.0, "acc_stderr": 0.0 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). 
--> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
open-llm-leaderboard/details_mediocredev__open-llama-3b-v2-instruct
[ "region:us" ]
2023-12-16T15:30:32+00:00
{"pretty_name": "Evaluation run of mediocredev/open-llama-3b-v2-instruct", "dataset_summary": "Dataset automatically created during the evaluation run of model [mediocredev/open-llama-3b-v2-instruct](https://huggingface.co/mediocredev/open-llama-3b-v2-instruct) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_mediocredev__open-llama-3b-v2-instruct\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-12-16T15:28:20.399841](https://huggingface.co/datasets/open-llm-leaderboard/details_mediocredev__open-llama-3b-v2-instruct/blob/main/results_2023-12-16T15-28-20.399841.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.3958981300034306,\n \"acc_stderr\": 0.034198998112262805,\n \"acc_norm\": 0.4018856544108267,\n \"acc_norm_stderr\": 0.035129135992579406,\n \"mc1\": 0.2252141982864137,\n \"mc1_stderr\": 0.014623240768023498,\n \"mc2\": 0.3795634078796446,\n \"mc2_stderr\": 0.014273839655133331\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.35409556313993173,\n \"acc_stderr\": 0.01397545412275655,\n \"acc_norm\": 0.3848122866894198,\n \"acc_norm_stderr\": 0.014218371065251104\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.5142401911969727,\n \"acc_stderr\": 0.0049877573147698445,\n \"acc_norm\": 0.7024497112129058,\n \"acc_norm_stderr\": 0.004562462665505218\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.27,\n \"acc_stderr\": 0.044619604333847415,\n \"acc_norm\": 0.27,\n \"acc_norm_stderr\": 0.044619604333847415\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.45185185185185184,\n \"acc_stderr\": 0.04299268905480864,\n \"acc_norm\": 0.45185185185185184,\n \"acc_norm_stderr\": 0.04299268905480864\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.3815789473684211,\n \"acc_stderr\": 0.03953173377749194,\n \"acc_norm\": 0.3815789473684211,\n \"acc_norm_stderr\": 0.03953173377749194\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.41,\n \"acc_stderr\": 0.049431107042371025,\n \"acc_norm\": 0.41,\n \"acc_norm_stderr\": 0.049431107042371025\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.4377358490566038,\n \"acc_stderr\": 0.03053333843046751,\n \"acc_norm\": 0.4377358490566038,\n \"acc_norm_stderr\": 0.03053333843046751\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.3680555555555556,\n \"acc_stderr\": 0.04032999053960719,\n \"acc_norm\": 0.3680555555555556,\n \"acc_norm_stderr\": 0.04032999053960719\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.34,\n 
\"acc_stderr\": 0.04760952285695235,\n \"acc_norm\": 0.34,\n \"acc_norm_stderr\": 0.04760952285695235\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.35,\n \"acc_stderr\": 0.0479372485441102,\n \"acc_norm\": 0.35,\n \"acc_norm_stderr\": 0.0479372485441102\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.29,\n \"acc_stderr\": 0.045604802157206845,\n \"acc_norm\": 0.29,\n \"acc_norm_stderr\": 0.045604802157206845\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.43352601156069365,\n \"acc_stderr\": 0.03778621079092055,\n \"acc_norm\": 0.43352601156069365,\n \"acc_norm_stderr\": 0.03778621079092055\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.19607843137254902,\n \"acc_stderr\": 0.03950581861179961,\n \"acc_norm\": 0.19607843137254902,\n \"acc_norm_stderr\": 0.03950581861179961\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.48,\n \"acc_stderr\": 0.050211673156867795,\n \"acc_norm\": 0.48,\n \"acc_norm_stderr\": 0.050211673156867795\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.3659574468085106,\n \"acc_stderr\": 0.0314895582974553,\n \"acc_norm\": 0.3659574468085106,\n \"acc_norm_stderr\": 0.0314895582974553\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.2894736842105263,\n \"acc_stderr\": 0.042663394431593935,\n \"acc_norm\": 0.2894736842105263,\n \"acc_norm_stderr\": 0.042663394431593935\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.3931034482758621,\n \"acc_stderr\": 0.0407032901370707,\n \"acc_norm\": 0.3931034482758621,\n \"acc_norm_stderr\": 0.0407032901370707\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.2698412698412698,\n \"acc_stderr\": 0.022860838309232072,\n \"acc_norm\": 0.2698412698412698,\n \"acc_norm_stderr\": 0.022860838309232072\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.30158730158730157,\n \"acc_stderr\": 0.041049472699033945,\n \"acc_norm\": 0.30158730158730157,\n \"acc_norm_stderr\": 0.041049472699033945\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.34,\n \"acc_stderr\": 0.047609522856952365,\n \"acc_norm\": 0.34,\n \"acc_norm_stderr\": 0.047609522856952365\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.4161290322580645,\n \"acc_stderr\": 0.028040981380761547,\n \"acc_norm\": 0.4161290322580645,\n \"acc_norm_stderr\": 0.028040981380761547\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.2660098522167488,\n \"acc_stderr\": 0.03108982600293753,\n \"acc_norm\": 0.2660098522167488,\n \"acc_norm_stderr\": 0.03108982600293753\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.37,\n \"acc_stderr\": 0.04852365870939098,\n \"acc_norm\": 0.37,\n \"acc_norm_stderr\": 0.04852365870939098\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.4484848484848485,\n \"acc_stderr\": 0.038835659779569286,\n \"acc_norm\": 0.4484848484848485,\n \"acc_norm_stderr\": 0.038835659779569286\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.5454545454545454,\n \"acc_stderr\": 0.03547601494006937,\n \"acc_norm\": 0.5454545454545454,\n \"acc_norm_stderr\": 0.03547601494006937\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.48186528497409326,\n \"acc_stderr\": 0.03606065001832919,\n \"acc_norm\": 0.48186528497409326,\n \"acc_norm_stderr\": 0.03606065001832919\n },\n 
\"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.35128205128205126,\n \"acc_stderr\": 0.024203665177902796,\n \"acc_norm\": 0.35128205128205126,\n \"acc_norm_stderr\": 0.024203665177902796\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.25925925925925924,\n \"acc_stderr\": 0.026719240783712166,\n \"acc_norm\": 0.25925925925925924,\n \"acc_norm_stderr\": 0.026719240783712166\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.35714285714285715,\n \"acc_stderr\": 0.031124619309328177,\n \"acc_norm\": 0.35714285714285715,\n \"acc_norm_stderr\": 0.031124619309328177\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.2980132450331126,\n \"acc_stderr\": 0.037345356767871984,\n \"acc_norm\": 0.2980132450331126,\n \"acc_norm_stderr\": 0.037345356767871984\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.4972477064220184,\n \"acc_stderr\": 0.02143699835976532,\n \"acc_norm\": 0.4972477064220184,\n \"acc_norm_stderr\": 0.02143699835976532\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.3055555555555556,\n \"acc_stderr\": 0.03141554629402544,\n \"acc_norm\": 0.3055555555555556,\n \"acc_norm_stderr\": 0.03141554629402544\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.46078431372549017,\n \"acc_stderr\": 0.03498501649369527,\n \"acc_norm\": 0.46078431372549017,\n \"acc_norm_stderr\": 0.03498501649369527\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.5611814345991561,\n \"acc_stderr\": 0.032302649315470375,\n \"acc_norm\": 0.5611814345991561,\n \"acc_norm_stderr\": 0.032302649315470375\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.4977578475336323,\n \"acc_stderr\": 0.033557465352232634,\n \"acc_norm\": 0.4977578475336323,\n \"acc_norm_stderr\": 0.033557465352232634\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.46564885496183206,\n \"acc_stderr\": 0.04374928560599738,\n \"acc_norm\": 0.46564885496183206,\n \"acc_norm_stderr\": 0.04374928560599738\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.47107438016528924,\n \"acc_stderr\": 0.04556710331269498,\n \"acc_norm\": 0.47107438016528924,\n \"acc_norm_stderr\": 0.04556710331269498\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.4166666666666667,\n \"acc_stderr\": 0.04766075165356461,\n \"acc_norm\": 0.4166666666666667,\n \"acc_norm_stderr\": 0.04766075165356461\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.4110429447852761,\n \"acc_stderr\": 0.038656978537853624,\n \"acc_norm\": 0.4110429447852761,\n \"acc_norm_stderr\": 0.038656978537853624\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.3125,\n \"acc_stderr\": 0.043994650575715215,\n \"acc_norm\": 0.3125,\n \"acc_norm_stderr\": 0.043994650575715215\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.5533980582524272,\n \"acc_stderr\": 0.04922424153458933,\n \"acc_norm\": 0.5533980582524272,\n \"acc_norm_stderr\": 0.04922424153458933\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.5341880341880342,\n \"acc_stderr\": 0.03267942734081228,\n \"acc_norm\": 0.5341880341880342,\n \"acc_norm_stderr\": 0.03267942734081228\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.4,\n \"acc_stderr\": 0.049236596391733084,\n \"acc_norm\": 0.4,\n \"acc_norm_stderr\": 0.049236596391733084\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 
0.5261813537675607,\n \"acc_stderr\": 0.01785543455404199,\n \"acc_norm\": 0.5261813537675607,\n \"acc_norm_stderr\": 0.01785543455404199\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.4277456647398844,\n \"acc_stderr\": 0.02663653974111608,\n \"acc_norm\": 0.4277456647398844,\n \"acc_norm_stderr\": 0.02663653974111608\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.25139664804469275,\n \"acc_stderr\": 0.014508979453553984,\n \"acc_norm\": 0.25139664804469275,\n \"acc_norm_stderr\": 0.014508979453553984\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.42810457516339867,\n \"acc_stderr\": 0.028332397483664274,\n \"acc_norm\": 0.42810457516339867,\n \"acc_norm_stderr\": 0.028332397483664274\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.40192926045016075,\n \"acc_stderr\": 0.027846476005930473,\n \"acc_norm\": 0.40192926045016075,\n \"acc_norm_stderr\": 0.027846476005930473\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.41358024691358025,\n \"acc_stderr\": 0.027402042040269952,\n \"acc_norm\": 0.41358024691358025,\n \"acc_norm_stderr\": 0.027402042040269952\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.3049645390070922,\n \"acc_stderr\": 0.027464708442022128,\n \"acc_norm\": 0.3049645390070922,\n \"acc_norm_stderr\": 0.027464708442022128\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.30964797913950454,\n \"acc_stderr\": 0.01180859826250332,\n \"acc_norm\": 0.30964797913950454,\n \"acc_norm_stderr\": 0.01180859826250332\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.3786764705882353,\n \"acc_stderr\": 0.029465133639776132,\n \"acc_norm\": 0.3786764705882353,\n \"acc_norm_stderr\": 0.029465133639776132\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.36437908496732024,\n \"acc_stderr\": 0.019469518221573702,\n \"acc_norm\": 0.36437908496732024,\n \"acc_norm_stderr\": 0.019469518221573702\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.5,\n \"acc_stderr\": 0.04789131426105757,\n \"acc_norm\": 0.5,\n \"acc_norm_stderr\": 0.04789131426105757\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.32653061224489793,\n \"acc_stderr\": 0.030021056238440286,\n \"acc_norm\": 0.32653061224489793,\n \"acc_norm_stderr\": 0.030021056238440286\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.4527363184079602,\n \"acc_stderr\": 0.035197027175769155,\n \"acc_norm\": 0.4527363184079602,\n \"acc_norm_stderr\": 0.035197027175769155\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.6,\n \"acc_stderr\": 0.049236596391733084,\n \"acc_norm\": 0.6,\n \"acc_norm_stderr\": 0.049236596391733084\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.39156626506024095,\n \"acc_stderr\": 0.03799857454479636,\n \"acc_norm\": 0.39156626506024095,\n \"acc_norm_stderr\": 0.03799857454479636\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.49707602339181284,\n \"acc_stderr\": 0.03834759370936839,\n \"acc_norm\": 0.49707602339181284,\n \"acc_norm_stderr\": 0.03834759370936839\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.2252141982864137,\n \"mc1_stderr\": 0.014623240768023498,\n \"mc2\": 0.3795634078796446,\n \"mc2_stderr\": 0.014273839655133331\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.6574585635359116,\n \"acc_stderr\": 0.013337483579075923\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.0,\n \"acc_stderr\": 0.0\n }\n}\n```", "repo_url": 
"https://huggingface.co/mediocredev/open-llama-3b-v2-instruct", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_12_16T15_28_20.399841", "path": ["**/details_harness|arc:challenge|25_2023-12-16T15-28-20.399841.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-12-16T15-28-20.399841.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_12_16T15_28_20.399841", "path": ["**/details_harness|gsm8k|5_2023-12-16T15-28-20.399841.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-12-16T15-28-20.399841.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_12_16T15_28_20.399841", "path": ["**/details_harness|hellaswag|10_2023-12-16T15-28-20.399841.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-12-16T15-28-20.399841.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_12_16T15_28_20.399841", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T15-28-20.399841.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-12-16T15-28-20.399841.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-12-16T15-28-20.399841.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T15-28-20.399841.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T15-28-20.399841.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-12-16T15-28-20.399841.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T15-28-20.399841.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T15-28-20.399841.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T15-28-20.399841.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T15-28-20.399841.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-12-16T15-28-20.399841.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-12-16T15-28-20.399841.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T15-28-20.399841.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-12-16T15-28-20.399841.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T15-28-20.399841.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T15-28-20.399841.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T15-28-20.399841.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-12-16T15-28-20.399841.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T15-28-20.399841.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T15-28-20.399841.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T15-28-20.399841.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T15-28-20.399841.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T15-28-20.399841.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T15-28-20.399841.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T15-28-20.399841.parquet", 
"**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T15-28-20.399841.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T15-28-20.399841.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T15-28-20.399841.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T15-28-20.399841.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T15-28-20.399841.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T15-28-20.399841.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T15-28-20.399841.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-12-16T15-28-20.399841.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T15-28-20.399841.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-12-16T15-28-20.399841.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T15-28-20.399841.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T15-28-20.399841.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T15-28-20.399841.parquet", "**/details_harness|hendrycksTest-management|5_2023-12-16T15-28-20.399841.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-12-16T15-28-20.399841.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T15-28-20.399841.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T15-28-20.399841.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T15-28-20.399841.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T15-28-20.399841.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-12-16T15-28-20.399841.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-12-16T15-28-20.399841.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-12-16T15-28-20.399841.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T15-28-20.399841.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-12-16T15-28-20.399841.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T15-28-20.399841.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T15-28-20.399841.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-12-16T15-28-20.399841.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-12-16T15-28-20.399841.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-12-16T15-28-20.399841.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T15-28-20.399841.parquet", "**/details_harness|hendrycksTest-virology|5_2023-12-16T15-28-20.399841.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-12-16T15-28-20.399841.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T15-28-20.399841.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-12-16T15-28-20.399841.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-12-16T15-28-20.399841.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T15-28-20.399841.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T15-28-20.399841.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-12-16T15-28-20.399841.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T15-28-20.399841.parquet", 
"**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T15-28-20.399841.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T15-28-20.399841.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T15-28-20.399841.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-12-16T15-28-20.399841.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-12-16T15-28-20.399841.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T15-28-20.399841.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-12-16T15-28-20.399841.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T15-28-20.399841.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T15-28-20.399841.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T15-28-20.399841.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-12-16T15-28-20.399841.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T15-28-20.399841.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T15-28-20.399841.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T15-28-20.399841.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T15-28-20.399841.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T15-28-20.399841.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T15-28-20.399841.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T15-28-20.399841.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T15-28-20.399841.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T15-28-20.399841.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T15-28-20.399841.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T15-28-20.399841.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T15-28-20.399841.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T15-28-20.399841.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T15-28-20.399841.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-12-16T15-28-20.399841.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T15-28-20.399841.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-12-16T15-28-20.399841.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T15-28-20.399841.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T15-28-20.399841.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T15-28-20.399841.parquet", "**/details_harness|hendrycksTest-management|5_2023-12-16T15-28-20.399841.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-12-16T15-28-20.399841.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T15-28-20.399841.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T15-28-20.399841.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T15-28-20.399841.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T15-28-20.399841.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-12-16T15-28-20.399841.parquet", 
"**/details_harness|hendrycksTest-philosophy|5_2023-12-16T15-28-20.399841.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-12-16T15-28-20.399841.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T15-28-20.399841.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-12-16T15-28-20.399841.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T15-28-20.399841.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T15-28-20.399841.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-12-16T15-28-20.399841.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-12-16T15-28-20.399841.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-12-16T15-28-20.399841.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T15-28-20.399841.parquet", "**/details_harness|hendrycksTest-virology|5_2023-12-16T15-28-20.399841.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-12-16T15-28-20.399841.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_12_16T15_28_20.399841", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T15-28-20.399841.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T15-28-20.399841.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_12_16T15_28_20.399841", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-12-16T15-28-20.399841.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-12-16T15-28-20.399841.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_12_16T15_28_20.399841", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-12-16T15-28-20.399841.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-12-16T15-28-20.399841.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_12_16T15_28_20.399841", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T15-28-20.399841.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T15-28-20.399841.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_12_16T15_28_20.399841", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T15-28-20.399841.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T15-28-20.399841.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_12_16T15_28_20.399841", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-12-16T15-28-20.399841.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-12-16T15-28-20.399841.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_12_16T15_28_20.399841", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T15-28-20.399841.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T15-28-20.399841.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_12_16T15_28_20.399841", "path": 
["**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T15-28-20.399841.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T15-28-20.399841.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_12_16T15_28_20.399841", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T15-28-20.399841.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T15-28-20.399841.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_12_16T15_28_20.399841", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T15-28-20.399841.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T15-28-20.399841.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_12_16T15_28_20.399841", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-12-16T15-28-20.399841.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-12-16T15-28-20.399841.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_12_16T15_28_20.399841", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-12-16T15-28-20.399841.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-12-16T15-28-20.399841.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_12_16T15_28_20.399841", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T15-28-20.399841.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T15-28-20.399841.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_12_16T15_28_20.399841", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-12-16T15-28-20.399841.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-12-16T15-28-20.399841.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_12_16T15_28_20.399841", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T15-28-20.399841.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T15-28-20.399841.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_12_16T15_28_20.399841", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T15-28-20.399841.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T15-28-20.399841.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_12_16T15_28_20.399841", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T15-28-20.399841.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T15-28-20.399841.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_12_16T15_28_20.399841", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-12-16T15-28-20.399841.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-global_facts|5_2023-12-16T15-28-20.399841.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_12_16T15_28_20.399841", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T15-28-20.399841.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T15-28-20.399841.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_12_16T15_28_20.399841", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T15-28-20.399841.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T15-28-20.399841.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_12_16T15_28_20.399841", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T15-28-20.399841.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T15-28-20.399841.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_12_16T15_28_20.399841", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T15-28-20.399841.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T15-28-20.399841.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_12_16T15_28_20.399841", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T15-28-20.399841.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T15-28-20.399841.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_12_16T15_28_20.399841", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T15-28-20.399841.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T15-28-20.399841.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_12_16T15_28_20.399841", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T15-28-20.399841.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T15-28-20.399841.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_12_16T15_28_20.399841", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T15-28-20.399841.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T15-28-20.399841.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_12_16T15_28_20.399841", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T15-28-20.399841.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T15-28-20.399841.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_12_16T15_28_20.399841", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T15-28-20.399841.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T15-28-20.399841.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_12_16T15_28_20.399841", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T15-28-20.399841.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T15-28-20.399841.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_12_16T15_28_20.399841", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T15-28-20.399841.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T15-28-20.399841.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_12_16T15_28_20.399841", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T15-28-20.399841.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T15-28-20.399841.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_12_16T15_28_20.399841", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T15-28-20.399841.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T15-28-20.399841.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_12_16T15_28_20.399841", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-12-16T15-28-20.399841.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-12-16T15-28-20.399841.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_12_16T15_28_20.399841", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T15-28-20.399841.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T15-28-20.399841.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_12_16T15_28_20.399841", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-12-16T15-28-20.399841.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-12-16T15-28-20.399841.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_12_16T15_28_20.399841", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T15-28-20.399841.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T15-28-20.399841.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_12_16T15_28_20.399841", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T15-28-20.399841.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T15-28-20.399841.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_12_16T15_28_20.399841", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T15-28-20.399841.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T15-28-20.399841.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_12_16T15_28_20.399841", "path": ["**/details_harness|hendrycksTest-management|5_2023-12-16T15-28-20.399841.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-12-16T15-28-20.399841.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_12_16T15_28_20.399841", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-12-16T15-28-20.399841.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-12-16T15-28-20.399841.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_12_16T15_28_20.399841", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T15-28-20.399841.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T15-28-20.399841.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_12_16T15_28_20.399841", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T15-28-20.399841.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T15-28-20.399841.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_12_16T15_28_20.399841", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T15-28-20.399841.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T15-28-20.399841.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_12_16T15_28_20.399841", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T15-28-20.399841.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T15-28-20.399841.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_12_16T15_28_20.399841", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-12-16T15-28-20.399841.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-12-16T15-28-20.399841.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_12_16T15_28_20.399841", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-12-16T15-28-20.399841.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-12-16T15-28-20.399841.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_12_16T15_28_20.399841", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-12-16T15-28-20.399841.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-12-16T15-28-20.399841.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_12_16T15_28_20.399841", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T15-28-20.399841.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T15-28-20.399841.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_12_16T15_28_20.399841", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2023-12-16T15-28-20.399841.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-12-16T15-28-20.399841.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_12_16T15_28_20.399841", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T15-28-20.399841.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T15-28-20.399841.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_12_16T15_28_20.399841", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T15-28-20.399841.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T15-28-20.399841.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_12_16T15_28_20.399841", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-12-16T15-28-20.399841.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-12-16T15-28-20.399841.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_12_16T15_28_20.399841", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-12-16T15-28-20.399841.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-12-16T15-28-20.399841.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_12_16T15_28_20.399841", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-12-16T15-28-20.399841.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-12-16T15-28-20.399841.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_12_16T15_28_20.399841", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T15-28-20.399841.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T15-28-20.399841.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_12_16T15_28_20.399841", "path": ["**/details_harness|hendrycksTest-virology|5_2023-12-16T15-28-20.399841.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-12-16T15-28-20.399841.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_12_16T15_28_20.399841", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-12-16T15-28-20.399841.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-12-16T15-28-20.399841.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_12_16T15_28_20.399841", "path": ["**/details_harness|truthfulqa:mc|0_2023-12-16T15-28-20.399841.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-12-16T15-28-20.399841.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_12_16T15_28_20.399841", "path": ["**/details_harness|winogrande|5_2023-12-16T15-28-20.399841.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-12-16T15-28-20.399841.parquet"]}]}, {"config_name": "results", "data_files": [{"split": 
"2023_12_16T15_28_20.399841", "path": ["results_2023-12-16T15-28-20.399841.parquet"]}, {"split": "latest", "path": ["results_2023-12-16T15-28-20.399841.parquet"]}]}]}
2023-12-16T15:31:14+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of mediocredev/open-llama-3b-v2-instruct Dataset automatically created during the evaluation run of model mediocredev/open-llama-3b-v2-instruct on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-12-16T15:28:20.399841 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
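The loading snippet that the card text above refers to is preserved in this record's metadata; a minimal sketch of it is reproduced below, using only config and split names that appear in that metadata (`harness_winogrande_5`, `results`, and the `latest` split):

```python
from datasets import load_dataset

# Per-sample details for one benchmark config of this evaluation run;
# the "train" (and "latest") split points at the most recent results.
data = load_dataset(
    "open-llm-leaderboard/details_mediocredev__open-llama-3b-v2-instruct",
    "harness_winogrande_5",
    split="train",
)

# Aggregated metrics for the whole run live in the "results" config.
results = load_dataset(
    "open-llm-leaderboard/details_mediocredev__open-llama-3b-v2-instruct",
    "results",
    split="latest",
)
```

Any of the per-task configs listed in the metadata (for example `harness_hendrycksTest_abstract_algebra_5` or `harness_gsm8k_5`) can be substituted for `harness_winogrande_5` in the same call.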
[ "# Dataset Card for Evaluation run of mediocredev/open-llama-3b-v2-instruct\n\n\n\nDataset automatically created during the evaluation run of model mediocredev/open-llama-3b-v2-instruct on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-12-16T15:28:20.399841(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of mediocredev/open-llama-3b-v2-instruct\n\n\n\nDataset automatically created during the evaluation run of model mediocredev/open-llama-3b-v2-instruct on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-12-16T15:28:20.399841(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 191, 67, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of mediocredev/open-llama-3b-v2-instruct\n\n\n\nDataset automatically created during the evaluation run of model mediocredev/open-llama-3b-v2-instruct on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-12-16T15:28:20.399841(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]## Dataset Card Authors [optional]" ]
ba3512bf6fd24ac80d37d3f071c8b890ab79039d
# Dataset Card for Evaluation run of jan-ai/Pandora-10.7B-v1 <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [jan-ai/Pandora-10.7B-v1](https://huggingface.co/jan-ai/Pandora-10.7B-v1) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_jan-ai__Pandora-10.7B-v1", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-12-16T15:32:12.267784](https://huggingface.co/datasets/open-llm-leaderboard/details_jan-ai__Pandora-10.7B-v1/blob/main/results_2023-12-16T15-32-12.267784.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.6531114574888794, "acc_stderr": 0.03214413202447322, "acc_norm": 0.6542778729809938, "acc_norm_stderr": 0.03279516707564304, "mc1": 0.5924112607099143, "mc1_stderr": 0.01720194923455311, "mc2": 0.706747043063352, "mc2_stderr": 0.014995848324388153 }, "harness|arc:challenge|25": { "acc": 0.6808873720136519, "acc_stderr": 0.013621696119173307, "acc_norm": 0.7107508532423208, "acc_norm_stderr": 0.013250012579393441 }, "harness|hellaswag|10": { "acc": 0.6994622585142402, "acc_stderr": 0.004575548557275207, "acc_norm": 0.8706432981477793, "acc_norm_stderr": 0.0033490845685472614 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.6074074074074074, "acc_stderr": 0.0421850621536888, "acc_norm": 0.6074074074074074, "acc_norm_stderr": 0.0421850621536888 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.6710526315789473, "acc_stderr": 0.03823428969926604, "acc_norm": 0.6710526315789473, "acc_norm_stderr": 0.03823428969926604 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.59, "acc_stderr": 0.04943110704237102, "acc_norm": 0.59, "acc_norm_stderr": 0.04943110704237102 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.7018867924528301, "acc_stderr": 0.028152837942493864, "acc_norm": 0.7018867924528301, "acc_norm_stderr": 0.028152837942493864 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.7569444444444444, "acc_stderr": 0.0358687928008034, "acc_norm": 0.7569444444444444, "acc_norm_stderr": 0.0358687928008034 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.49, "acc_stderr": 0.05024183937956911, "acc_norm": 0.49, "acc_norm_stderr": 0.05024183937956911 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.55, "acc_stderr": 0.05, "acc_norm": 0.55, "acc_norm_stderr": 0.05 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.33, "acc_stderr": 
0.04725815626252603, "acc_norm": 0.33, "acc_norm_stderr": 0.04725815626252603 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.6763005780346821, "acc_stderr": 0.035676037996391706, "acc_norm": 0.6763005780346821, "acc_norm_stderr": 0.035676037996391706 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.4215686274509804, "acc_stderr": 0.04913595201274498, "acc_norm": 0.4215686274509804, "acc_norm_stderr": 0.04913595201274498 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.78, "acc_stderr": 0.04163331998932263, "acc_norm": 0.78, "acc_norm_stderr": 0.04163331998932263 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.5829787234042553, "acc_stderr": 0.03223276266711712, "acc_norm": 0.5829787234042553, "acc_norm_stderr": 0.03223276266711712 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.5, "acc_stderr": 0.047036043419179864, "acc_norm": 0.5, "acc_norm_stderr": 0.047036043419179864 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.593103448275862, "acc_stderr": 0.04093793981266236, "acc_norm": 0.593103448275862, "acc_norm_stderr": 0.04093793981266236 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.3968253968253968, "acc_stderr": 0.025197101074246483, "acc_norm": 0.3968253968253968, "acc_norm_stderr": 0.025197101074246483 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.46825396825396826, "acc_stderr": 0.04463112720677172, "acc_norm": 0.46825396825396826, "acc_norm_stderr": 0.04463112720677172 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.35, "acc_stderr": 0.04793724854411019, "acc_norm": 0.35, "acc_norm_stderr": 0.04793724854411019 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.8032258064516129, "acc_stderr": 0.022616409420742025, "acc_norm": 0.8032258064516129, "acc_norm_stderr": 0.022616409420742025 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.4975369458128079, "acc_stderr": 0.03517945038691063, "acc_norm": 0.4975369458128079, "acc_norm_stderr": 0.03517945038691063 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.73, "acc_stderr": 0.04461960433384739, "acc_norm": 0.73, "acc_norm_stderr": 0.04461960433384739 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.7575757575757576, "acc_stderr": 0.03346409881055953, "acc_norm": 0.7575757575757576, "acc_norm_stderr": 0.03346409881055953 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.7878787878787878, "acc_stderr": 0.02912652283458682, "acc_norm": 0.7878787878787878, "acc_norm_stderr": 0.02912652283458682 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.8808290155440415, "acc_stderr": 0.02338193534812142, "acc_norm": 0.8808290155440415, "acc_norm_stderr": 0.02338193534812142 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.6871794871794872, "acc_stderr": 0.023507579020645365, "acc_norm": 0.6871794871794872, "acc_norm_stderr": 0.023507579020645365 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.37407407407407406, "acc_stderr": 0.029502861128955293, "acc_norm": 0.37407407407407406, "acc_norm_stderr": 0.029502861128955293 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.7352941176470589, "acc_stderr": 0.02865749128507198, "acc_norm": 0.7352941176470589, "acc_norm_stderr": 0.02865749128507198 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.3708609271523179, "acc_stderr": 0.03943966699183629, "acc_norm": 0.3708609271523179, "acc_norm_stderr": 0.03943966699183629 }, 
"harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8568807339449541, "acc_stderr": 0.01501446249716859, "acc_norm": 0.8568807339449541, "acc_norm_stderr": 0.01501446249716859 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.5648148148148148, "acc_stderr": 0.03381200005643526, "acc_norm": 0.5648148148148148, "acc_norm_stderr": 0.03381200005643526 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.8284313725490197, "acc_stderr": 0.026460569561240644, "acc_norm": 0.8284313725490197, "acc_norm_stderr": 0.026460569561240644 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.810126582278481, "acc_stderr": 0.02553010046023349, "acc_norm": 0.810126582278481, "acc_norm_stderr": 0.02553010046023349 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.6816143497757847, "acc_stderr": 0.03126580522513713, "acc_norm": 0.6816143497757847, "acc_norm_stderr": 0.03126580522513713 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.7633587786259542, "acc_stderr": 0.03727673575596913, "acc_norm": 0.7633587786259542, "acc_norm_stderr": 0.03727673575596913 }, "harness|hendrycksTest-international_law|5": { "acc": 0.7768595041322314, "acc_stderr": 0.03800754475228733, "acc_norm": 0.7768595041322314, "acc_norm_stderr": 0.03800754475228733 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.7870370370370371, "acc_stderr": 0.0395783547198098, "acc_norm": 0.7870370370370371, "acc_norm_stderr": 0.0395783547198098 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.7668711656441718, "acc_stderr": 0.0332201579577674, "acc_norm": 0.7668711656441718, "acc_norm_stderr": 0.0332201579577674 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.44642857142857145, "acc_stderr": 0.04718471485219588, "acc_norm": 0.44642857142857145, "acc_norm_stderr": 0.04718471485219588 }, "harness|hendrycksTest-management|5": { "acc": 0.7766990291262136, "acc_stderr": 0.04123553189891431, "acc_norm": 0.7766990291262136, "acc_norm_stderr": 0.04123553189891431 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8675213675213675, "acc_stderr": 0.02220930907316562, "acc_norm": 0.8675213675213675, "acc_norm_stderr": 0.02220930907316562 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.7, "acc_stderr": 0.046056618647183814, "acc_norm": 0.7, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.8263090676883781, "acc_stderr": 0.013547415658662255, "acc_norm": 0.8263090676883781, "acc_norm_stderr": 0.013547415658662255 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.7109826589595376, "acc_stderr": 0.02440517393578323, "acc_norm": 0.7109826589595376, "acc_norm_stderr": 0.02440517393578323 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.44692737430167595, "acc_stderr": 0.016628030039647614, "acc_norm": 0.44692737430167595, "acc_norm_stderr": 0.016628030039647614 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.7483660130718954, "acc_stderr": 0.0248480182638752, "acc_norm": 0.7483660130718954, "acc_norm_stderr": 0.0248480182638752 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.7138263665594855, "acc_stderr": 0.025670259242188933, "acc_norm": 0.7138263665594855, "acc_norm_stderr": 0.025670259242188933 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.7345679012345679, "acc_stderr": 0.024569223600460845, "acc_norm": 0.7345679012345679, "acc_norm_stderr": 0.024569223600460845 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.5070921985815603, "acc_stderr": 0.02982449855912901, "acc_norm": 
0.5070921985815603, "acc_norm_stderr": 0.02982449855912901 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.4595827900912647, "acc_stderr": 0.012728446067669971, "acc_norm": 0.4595827900912647, "acc_norm_stderr": 0.012728446067669971 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.7022058823529411, "acc_stderr": 0.027778298701545443, "acc_norm": 0.7022058823529411, "acc_norm_stderr": 0.027778298701545443 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.6584967320261438, "acc_stderr": 0.019184639328092487, "acc_norm": 0.6584967320261438, "acc_norm_stderr": 0.019184639328092487 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.7090909090909091, "acc_stderr": 0.04350271442923243, "acc_norm": 0.7090909090909091, "acc_norm_stderr": 0.04350271442923243 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.6979591836734694, "acc_stderr": 0.0293936093198798, "acc_norm": 0.6979591836734694, "acc_norm_stderr": 0.0293936093198798 }, "harness|hendrycksTest-sociology|5": { "acc": 0.845771144278607, "acc_stderr": 0.025538433368578334, "acc_norm": 0.845771144278607, "acc_norm_stderr": 0.025538433368578334 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.86, "acc_stderr": 0.0348735088019777, "acc_norm": 0.86, "acc_norm_stderr": 0.0348735088019777 }, "harness|hendrycksTest-virology|5": { "acc": 0.536144578313253, "acc_stderr": 0.038823108508905954, "acc_norm": 0.536144578313253, "acc_norm_stderr": 0.038823108508905954 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8362573099415205, "acc_stderr": 0.028380919596145866, "acc_norm": 0.8362573099415205, "acc_norm_stderr": 0.028380919596145866 }, "harness|truthfulqa:mc|0": { "mc1": 0.5924112607099143, "mc1_stderr": 0.01720194923455311, "mc2": 0.706747043063352, "mc2_stderr": 0.014995848324388153 }, "harness|winogrande|5": { "acc": 0.813733228097869, "acc_stderr": 0.010941877955676211 }, "harness|gsm8k|5": { "acc": 0.624715693707354, "acc_stderr": 0.013337170545742924 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). 
--> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
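As a supplement to the loading snippet above, the sketch below loads the aggregated "results" configuration and one per-task configuration; the config names and the "latest" split are taken from this card's configuration metadata. This is a minimal, illustrative sketch: it assumes the `datasets` library is installed and the repository is reachable, and the exact column layout of each split is not shown here and may differ between leaderboard versions.

```python
from datasets import load_dataset

REPO = "open-llm-leaderboard/details_jan-ai__Pandora-10.7B-v1"

# Aggregated metrics for the run; the "latest" split always points at the most recent evaluation.
results = load_dataset(REPO, "results", split="latest")

# Per-task details live in their own configs, e.g. the 25-shot ARC challenge run.
arc_details = load_dataset(REPO, "harness_arc_challenge_25", split="latest")

print(results)         # dataset object holding the aggregated results rows
print(arc_details[0])  # first per-example record of the ARC details
```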
open-llm-leaderboard/details_jan-ai__Pandora-10.7B-v1
[ "region:us" ]
2023-12-16T15:35:08+00:00
{"pretty_name": "Evaluation run of jan-ai/Pandora-10.7B-v1", "dataset_summary": "Dataset automatically created during the evaluation run of model [jan-ai/Pandora-10.7B-v1](https://huggingface.co/jan-ai/Pandora-10.7B-v1) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_jan-ai__Pandora-10.7B-v1\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-12-16T15:32:12.267784](https://huggingface.co/datasets/open-llm-leaderboard/details_jan-ai__Pandora-10.7B-v1/blob/main/results_2023-12-16T15-32-12.267784.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6531114574888794,\n \"acc_stderr\": 0.03214413202447322,\n \"acc_norm\": 0.6542778729809938,\n \"acc_norm_stderr\": 0.03279516707564304,\n \"mc1\": 0.5924112607099143,\n \"mc1_stderr\": 0.01720194923455311,\n \"mc2\": 0.706747043063352,\n \"mc2_stderr\": 0.014995848324388153\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.6808873720136519,\n \"acc_stderr\": 0.013621696119173307,\n \"acc_norm\": 0.7107508532423208,\n \"acc_norm_stderr\": 0.013250012579393441\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6994622585142402,\n \"acc_stderr\": 0.004575548557275207,\n \"acc_norm\": 0.8706432981477793,\n \"acc_norm_stderr\": 0.0033490845685472614\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.31,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.31,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6074074074074074,\n \"acc_stderr\": 0.0421850621536888,\n \"acc_norm\": 0.6074074074074074,\n \"acc_norm_stderr\": 0.0421850621536888\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.6710526315789473,\n \"acc_stderr\": 0.03823428969926604,\n \"acc_norm\": 0.6710526315789473,\n \"acc_norm_stderr\": 0.03823428969926604\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.59,\n \"acc_stderr\": 0.04943110704237102,\n \"acc_norm\": 0.59,\n \"acc_norm_stderr\": 0.04943110704237102\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.7018867924528301,\n \"acc_stderr\": 0.028152837942493864,\n \"acc_norm\": 0.7018867924528301,\n \"acc_norm_stderr\": 0.028152837942493864\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7569444444444444,\n \"acc_stderr\": 0.0358687928008034,\n \"acc_norm\": 0.7569444444444444,\n \"acc_norm_stderr\": 0.0358687928008034\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.49,\n \"acc_stderr\": 0.05024183937956911,\n \"acc_norm\": 0.49,\n \"acc_norm_stderr\": 
0.05024183937956911\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.55,\n \"acc_stderr\": 0.05,\n \"acc_norm\": 0.55,\n \"acc_norm_stderr\": 0.05\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.33,\n \"acc_stderr\": 0.04725815626252603,\n \"acc_norm\": 0.33,\n \"acc_norm_stderr\": 0.04725815626252603\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6763005780346821,\n \"acc_stderr\": 0.035676037996391706,\n \"acc_norm\": 0.6763005780346821,\n \"acc_norm_stderr\": 0.035676037996391706\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.4215686274509804,\n \"acc_stderr\": 0.04913595201274498,\n \"acc_norm\": 0.4215686274509804,\n \"acc_norm_stderr\": 0.04913595201274498\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.78,\n \"acc_stderr\": 0.04163331998932263,\n \"acc_norm\": 0.78,\n \"acc_norm_stderr\": 0.04163331998932263\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.5829787234042553,\n \"acc_stderr\": 0.03223276266711712,\n \"acc_norm\": 0.5829787234042553,\n \"acc_norm_stderr\": 0.03223276266711712\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.5,\n \"acc_stderr\": 0.047036043419179864,\n \"acc_norm\": 0.5,\n \"acc_norm_stderr\": 0.047036043419179864\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.593103448275862,\n \"acc_stderr\": 0.04093793981266236,\n \"acc_norm\": 0.593103448275862,\n \"acc_norm_stderr\": 0.04093793981266236\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.3968253968253968,\n \"acc_stderr\": 0.025197101074246483,\n \"acc_norm\": 0.3968253968253968,\n \"acc_norm_stderr\": 0.025197101074246483\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.46825396825396826,\n \"acc_stderr\": 0.04463112720677172,\n \"acc_norm\": 0.46825396825396826,\n \"acc_norm_stderr\": 0.04463112720677172\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.35,\n \"acc_stderr\": 0.04793724854411019,\n \"acc_norm\": 0.35,\n \"acc_norm_stderr\": 0.04793724854411019\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.8032258064516129,\n \"acc_stderr\": 0.022616409420742025,\n \"acc_norm\": 0.8032258064516129,\n \"acc_norm_stderr\": 0.022616409420742025\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.4975369458128079,\n \"acc_stderr\": 0.03517945038691063,\n \"acc_norm\": 0.4975369458128079,\n \"acc_norm_stderr\": 0.03517945038691063\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.73,\n \"acc_stderr\": 0.04461960433384739,\n \"acc_norm\": 0.73,\n \"acc_norm_stderr\": 0.04461960433384739\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7575757575757576,\n \"acc_stderr\": 0.03346409881055953,\n \"acc_norm\": 0.7575757575757576,\n \"acc_norm_stderr\": 0.03346409881055953\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.7878787878787878,\n \"acc_stderr\": 0.02912652283458682,\n \"acc_norm\": 0.7878787878787878,\n \"acc_norm_stderr\": 0.02912652283458682\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.8808290155440415,\n \"acc_stderr\": 0.02338193534812142,\n \"acc_norm\": 0.8808290155440415,\n \"acc_norm_stderr\": 0.02338193534812142\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.6871794871794872,\n \"acc_stderr\": 0.023507579020645365,\n \"acc_norm\": 0.6871794871794872,\n 
\"acc_norm_stderr\": 0.023507579020645365\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.37407407407407406,\n \"acc_stderr\": 0.029502861128955293,\n \"acc_norm\": 0.37407407407407406,\n \"acc_norm_stderr\": 0.029502861128955293\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.7352941176470589,\n \"acc_stderr\": 0.02865749128507198,\n \"acc_norm\": 0.7352941176470589,\n \"acc_norm_stderr\": 0.02865749128507198\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.3708609271523179,\n \"acc_stderr\": 0.03943966699183629,\n \"acc_norm\": 0.3708609271523179,\n \"acc_norm_stderr\": 0.03943966699183629\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8568807339449541,\n \"acc_stderr\": 0.01501446249716859,\n \"acc_norm\": 0.8568807339449541,\n \"acc_norm_stderr\": 0.01501446249716859\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.5648148148148148,\n \"acc_stderr\": 0.03381200005643526,\n \"acc_norm\": 0.5648148148148148,\n \"acc_norm_stderr\": 0.03381200005643526\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.8284313725490197,\n \"acc_stderr\": 0.026460569561240644,\n \"acc_norm\": 0.8284313725490197,\n \"acc_norm_stderr\": 0.026460569561240644\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.810126582278481,\n \"acc_stderr\": 0.02553010046023349,\n \"acc_norm\": 0.810126582278481,\n \"acc_norm_stderr\": 0.02553010046023349\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6816143497757847,\n \"acc_stderr\": 0.03126580522513713,\n \"acc_norm\": 0.6816143497757847,\n \"acc_norm_stderr\": 0.03126580522513713\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.7633587786259542,\n \"acc_stderr\": 0.03727673575596913,\n \"acc_norm\": 0.7633587786259542,\n \"acc_norm_stderr\": 0.03727673575596913\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.7768595041322314,\n \"acc_stderr\": 0.03800754475228733,\n \"acc_norm\": 0.7768595041322314,\n \"acc_norm_stderr\": 0.03800754475228733\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7870370370370371,\n \"acc_stderr\": 0.0395783547198098,\n \"acc_norm\": 0.7870370370370371,\n \"acc_norm_stderr\": 0.0395783547198098\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7668711656441718,\n \"acc_stderr\": 0.0332201579577674,\n \"acc_norm\": 0.7668711656441718,\n \"acc_norm_stderr\": 0.0332201579577674\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.44642857142857145,\n \"acc_stderr\": 0.04718471485219588,\n \"acc_norm\": 0.44642857142857145,\n \"acc_norm_stderr\": 0.04718471485219588\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7766990291262136,\n \"acc_stderr\": 0.04123553189891431,\n \"acc_norm\": 0.7766990291262136,\n \"acc_norm_stderr\": 0.04123553189891431\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8675213675213675,\n \"acc_stderr\": 0.02220930907316562,\n \"acc_norm\": 0.8675213675213675,\n \"acc_norm_stderr\": 0.02220930907316562\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.7,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.7,\n \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.8263090676883781,\n \"acc_stderr\": 0.013547415658662255,\n \"acc_norm\": 0.8263090676883781,\n \"acc_norm_stderr\": 0.013547415658662255\n },\n 
\"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.7109826589595376,\n \"acc_stderr\": 0.02440517393578323,\n \"acc_norm\": 0.7109826589595376,\n \"acc_norm_stderr\": 0.02440517393578323\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.44692737430167595,\n \"acc_stderr\": 0.016628030039647614,\n \"acc_norm\": 0.44692737430167595,\n \"acc_norm_stderr\": 0.016628030039647614\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.7483660130718954,\n \"acc_stderr\": 0.0248480182638752,\n \"acc_norm\": 0.7483660130718954,\n \"acc_norm_stderr\": 0.0248480182638752\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.7138263665594855,\n \"acc_stderr\": 0.025670259242188933,\n \"acc_norm\": 0.7138263665594855,\n \"acc_norm_stderr\": 0.025670259242188933\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.7345679012345679,\n \"acc_stderr\": 0.024569223600460845,\n \"acc_norm\": 0.7345679012345679,\n \"acc_norm_stderr\": 0.024569223600460845\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.5070921985815603,\n \"acc_stderr\": 0.02982449855912901,\n \"acc_norm\": 0.5070921985815603,\n \"acc_norm_stderr\": 0.02982449855912901\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.4595827900912647,\n \"acc_stderr\": 0.012728446067669971,\n \"acc_norm\": 0.4595827900912647,\n \"acc_norm_stderr\": 0.012728446067669971\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.7022058823529411,\n \"acc_stderr\": 0.027778298701545443,\n \"acc_norm\": 0.7022058823529411,\n \"acc_norm_stderr\": 0.027778298701545443\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6584967320261438,\n \"acc_stderr\": 0.019184639328092487,\n \"acc_norm\": 0.6584967320261438,\n \"acc_norm_stderr\": 0.019184639328092487\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.7090909090909091,\n \"acc_stderr\": 0.04350271442923243,\n \"acc_norm\": 0.7090909090909091,\n \"acc_norm_stderr\": 0.04350271442923243\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.6979591836734694,\n \"acc_stderr\": 0.0293936093198798,\n \"acc_norm\": 0.6979591836734694,\n \"acc_norm_stderr\": 0.0293936093198798\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.845771144278607,\n \"acc_stderr\": 0.025538433368578334,\n \"acc_norm\": 0.845771144278607,\n \"acc_norm_stderr\": 0.025538433368578334\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.86,\n \"acc_stderr\": 0.0348735088019777,\n \"acc_norm\": 0.86,\n \"acc_norm_stderr\": 0.0348735088019777\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.536144578313253,\n \"acc_stderr\": 0.038823108508905954,\n \"acc_norm\": 0.536144578313253,\n \"acc_norm_stderr\": 0.038823108508905954\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8362573099415205,\n \"acc_stderr\": 0.028380919596145866,\n \"acc_norm\": 0.8362573099415205,\n \"acc_norm_stderr\": 0.028380919596145866\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.5924112607099143,\n \"mc1_stderr\": 0.01720194923455311,\n \"mc2\": 0.706747043063352,\n \"mc2_stderr\": 0.014995848324388153\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.813733228097869,\n \"acc_stderr\": 0.010941877955676211\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.624715693707354,\n \"acc_stderr\": 0.013337170545742924\n }\n}\n```", "repo_url": "https://huggingface.co/jan-ai/Pandora-10.7B-v1", "leaderboard_url": 
"https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_12_16T15_32_12.267784", "path": ["**/details_harness|arc:challenge|25_2023-12-16T15-32-12.267784.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-12-16T15-32-12.267784.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_12_16T15_32_12.267784", "path": ["**/details_harness|gsm8k|5_2023-12-16T15-32-12.267784.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-12-16T15-32-12.267784.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_12_16T15_32_12.267784", "path": ["**/details_harness|hellaswag|10_2023-12-16T15-32-12.267784.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-12-16T15-32-12.267784.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_12_16T15_32_12.267784", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T15-32-12.267784.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-12-16T15-32-12.267784.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-12-16T15-32-12.267784.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T15-32-12.267784.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T15-32-12.267784.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-12-16T15-32-12.267784.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T15-32-12.267784.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T15-32-12.267784.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T15-32-12.267784.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T15-32-12.267784.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-12-16T15-32-12.267784.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-12-16T15-32-12.267784.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T15-32-12.267784.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-12-16T15-32-12.267784.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T15-32-12.267784.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T15-32-12.267784.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T15-32-12.267784.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-12-16T15-32-12.267784.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T15-32-12.267784.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T15-32-12.267784.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T15-32-12.267784.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T15-32-12.267784.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T15-32-12.267784.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T15-32-12.267784.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T15-32-12.267784.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T15-32-12.267784.parquet", 
"**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T15-32-12.267784.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T15-32-12.267784.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T15-32-12.267784.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T15-32-12.267784.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T15-32-12.267784.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T15-32-12.267784.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-12-16T15-32-12.267784.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T15-32-12.267784.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-12-16T15-32-12.267784.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T15-32-12.267784.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T15-32-12.267784.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T15-32-12.267784.parquet", "**/details_harness|hendrycksTest-management|5_2023-12-16T15-32-12.267784.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-12-16T15-32-12.267784.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T15-32-12.267784.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T15-32-12.267784.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T15-32-12.267784.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T15-32-12.267784.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-12-16T15-32-12.267784.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-12-16T15-32-12.267784.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-12-16T15-32-12.267784.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T15-32-12.267784.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-12-16T15-32-12.267784.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T15-32-12.267784.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T15-32-12.267784.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-12-16T15-32-12.267784.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-12-16T15-32-12.267784.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-12-16T15-32-12.267784.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T15-32-12.267784.parquet", "**/details_harness|hendrycksTest-virology|5_2023-12-16T15-32-12.267784.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-12-16T15-32-12.267784.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T15-32-12.267784.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-12-16T15-32-12.267784.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-12-16T15-32-12.267784.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T15-32-12.267784.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T15-32-12.267784.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-12-16T15-32-12.267784.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T15-32-12.267784.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T15-32-12.267784.parquet", 
"**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T15-32-12.267784.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T15-32-12.267784.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-12-16T15-32-12.267784.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-12-16T15-32-12.267784.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T15-32-12.267784.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-12-16T15-32-12.267784.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T15-32-12.267784.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T15-32-12.267784.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T15-32-12.267784.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-12-16T15-32-12.267784.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T15-32-12.267784.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T15-32-12.267784.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T15-32-12.267784.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T15-32-12.267784.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T15-32-12.267784.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T15-32-12.267784.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T15-32-12.267784.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T15-32-12.267784.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T15-32-12.267784.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T15-32-12.267784.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T15-32-12.267784.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T15-32-12.267784.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T15-32-12.267784.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T15-32-12.267784.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-12-16T15-32-12.267784.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T15-32-12.267784.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-12-16T15-32-12.267784.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T15-32-12.267784.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T15-32-12.267784.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T15-32-12.267784.parquet", "**/details_harness|hendrycksTest-management|5_2023-12-16T15-32-12.267784.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-12-16T15-32-12.267784.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T15-32-12.267784.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T15-32-12.267784.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T15-32-12.267784.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T15-32-12.267784.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-12-16T15-32-12.267784.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-12-16T15-32-12.267784.parquet", 
"**/details_harness|hendrycksTest-prehistory|5_2023-12-16T15-32-12.267784.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T15-32-12.267784.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-12-16T15-32-12.267784.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T15-32-12.267784.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T15-32-12.267784.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-12-16T15-32-12.267784.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-12-16T15-32-12.267784.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-12-16T15-32-12.267784.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T15-32-12.267784.parquet", "**/details_harness|hendrycksTest-virology|5_2023-12-16T15-32-12.267784.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-12-16T15-32-12.267784.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_12_16T15_32_12.267784", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T15-32-12.267784.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T15-32-12.267784.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_12_16T15_32_12.267784", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-12-16T15-32-12.267784.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-12-16T15-32-12.267784.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_12_16T15_32_12.267784", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-12-16T15-32-12.267784.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-12-16T15-32-12.267784.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_12_16T15_32_12.267784", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T15-32-12.267784.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T15-32-12.267784.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_12_16T15_32_12.267784", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T15-32-12.267784.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T15-32-12.267784.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_12_16T15_32_12.267784", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-12-16T15-32-12.267784.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-12-16T15-32-12.267784.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_12_16T15_32_12.267784", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T15-32-12.267784.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T15-32-12.267784.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_12_16T15_32_12.267784", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T15-32-12.267784.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T15-32-12.267784.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_12_16T15_32_12.267784", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T15-32-12.267784.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T15-32-12.267784.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_12_16T15_32_12.267784", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T15-32-12.267784.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T15-32-12.267784.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_12_16T15_32_12.267784", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-12-16T15-32-12.267784.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-12-16T15-32-12.267784.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_12_16T15_32_12.267784", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-12-16T15-32-12.267784.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-12-16T15-32-12.267784.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_12_16T15_32_12.267784", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T15-32-12.267784.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T15-32-12.267784.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_12_16T15_32_12.267784", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-12-16T15-32-12.267784.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-12-16T15-32-12.267784.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_12_16T15_32_12.267784", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T15-32-12.267784.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T15-32-12.267784.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_12_16T15_32_12.267784", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T15-32-12.267784.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T15-32-12.267784.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_12_16T15_32_12.267784", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T15-32-12.267784.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T15-32-12.267784.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_12_16T15_32_12.267784", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-12-16T15-32-12.267784.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-12-16T15-32-12.267784.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_12_16T15_32_12.267784", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T15-32-12.267784.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T15-32-12.267784.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_12_16T15_32_12.267784", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T15-32-12.267784.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T15-32-12.267784.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_12_16T15_32_12.267784", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T15-32-12.267784.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T15-32-12.267784.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_12_16T15_32_12.267784", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T15-32-12.267784.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T15-32-12.267784.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_12_16T15_32_12.267784", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T15-32-12.267784.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T15-32-12.267784.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_12_16T15_32_12.267784", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T15-32-12.267784.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T15-32-12.267784.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_12_16T15_32_12.267784", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T15-32-12.267784.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T15-32-12.267784.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_12_16T15_32_12.267784", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T15-32-12.267784.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T15-32-12.267784.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_12_16T15_32_12.267784", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T15-32-12.267784.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T15-32-12.267784.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_12_16T15_32_12.267784", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T15-32-12.267784.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T15-32-12.267784.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_12_16T15_32_12.267784", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T15-32-12.267784.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T15-32-12.267784.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_12_16T15_32_12.267784", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T15-32-12.267784.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T15-32-12.267784.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_12_16T15_32_12.267784", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T15-32-12.267784.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T15-32-12.267784.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_12_16T15_32_12.267784", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T15-32-12.267784.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T15-32-12.267784.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_12_16T15_32_12.267784", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-12-16T15-32-12.267784.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-12-16T15-32-12.267784.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_12_16T15_32_12.267784", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T15-32-12.267784.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T15-32-12.267784.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_12_16T15_32_12.267784", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-12-16T15-32-12.267784.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-12-16T15-32-12.267784.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_12_16T15_32_12.267784", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T15-32-12.267784.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T15-32-12.267784.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_12_16T15_32_12.267784", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T15-32-12.267784.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T15-32-12.267784.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_12_16T15_32_12.267784", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T15-32-12.267784.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T15-32-12.267784.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_12_16T15_32_12.267784", "path": ["**/details_harness|hendrycksTest-management|5_2023-12-16T15-32-12.267784.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-12-16T15-32-12.267784.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_12_16T15_32_12.267784", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-12-16T15-32-12.267784.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-12-16T15-32-12.267784.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_12_16T15_32_12.267784", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T15-32-12.267784.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T15-32-12.267784.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_12_16T15_32_12.267784", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T15-32-12.267784.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T15-32-12.267784.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_12_16T15_32_12.267784", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T15-32-12.267784.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T15-32-12.267784.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_12_16T15_32_12.267784", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T15-32-12.267784.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T15-32-12.267784.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_12_16T15_32_12.267784", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-12-16T15-32-12.267784.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-12-16T15-32-12.267784.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_12_16T15_32_12.267784", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-12-16T15-32-12.267784.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-12-16T15-32-12.267784.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_12_16T15_32_12.267784", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-12-16T15-32-12.267784.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-12-16T15-32-12.267784.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_12_16T15_32_12.267784", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T15-32-12.267784.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T15-32-12.267784.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_12_16T15_32_12.267784", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-12-16T15-32-12.267784.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2023-12-16T15-32-12.267784.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_12_16T15_32_12.267784", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T15-32-12.267784.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T15-32-12.267784.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_12_16T15_32_12.267784", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T15-32-12.267784.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T15-32-12.267784.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_12_16T15_32_12.267784", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-12-16T15-32-12.267784.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-12-16T15-32-12.267784.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_12_16T15_32_12.267784", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-12-16T15-32-12.267784.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-12-16T15-32-12.267784.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_12_16T15_32_12.267784", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-12-16T15-32-12.267784.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-12-16T15-32-12.267784.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_12_16T15_32_12.267784", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T15-32-12.267784.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T15-32-12.267784.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_12_16T15_32_12.267784", "path": ["**/details_harness|hendrycksTest-virology|5_2023-12-16T15-32-12.267784.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-12-16T15-32-12.267784.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_12_16T15_32_12.267784", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-12-16T15-32-12.267784.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-12-16T15-32-12.267784.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_12_16T15_32_12.267784", "path": ["**/details_harness|truthfulqa:mc|0_2023-12-16T15-32-12.267784.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-12-16T15-32-12.267784.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_12_16T15_32_12.267784", "path": ["**/details_harness|winogrande|5_2023-12-16T15-32-12.267784.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-12-16T15-32-12.267784.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_12_16T15_32_12.267784", "path": ["results_2023-12-16T15-32-12.267784.parquet"]}, {"split": "latest", "path": 
["results_2023-12-16T15-32-12.267784.parquet"]}]}]}
2023-12-16T15:35:51+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of jan-ai/Pandora-10.7B-v1 Dataset automatically created during the evaluation run of model jan-ai/Pandora-10.7B-v1 on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-12-16T15:32:12.267784 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
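To load the details for one task from this run, a minimal sketch is shown below. Note the assumptions: the repository id `open-llm-leaderboard/details_jan-ai__Pandora-10.7B-v1` is inferred from the `details_<org>__<model>` naming used by the sibling entry further down, and the `harness_winogrande_5` configuration and `latest` split names come from this entry's configs metadata above.

```python
from datasets import load_dataset

# Hypothetical repository id, inferred from the leaderboard's
# details_<org>__<model> naming pattern; adjust if the actual repo differs.
REPO = "open-llm-leaderboard/details_jan-ai__Pandora-10.7B-v1"

# "harness_winogrande_5" is one of the per-task configurations listed in the
# configs metadata; the "latest" split points at the most recent run.
data = load_dataset(REPO, "harness_winogrande_5", split="latest")
print(data)
```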
[ "# Dataset Card for Evaluation run of jan-ai/Pandora-10.7B-v1\n\n\n\nDataset automatically created during the evaluation run of model jan-ai/Pandora-10.7B-v1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-12-16T15:32:12.267784(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of jan-ai/Pandora-10.7B-v1\n\n\n\nDataset automatically created during the evaluation run of model jan-ai/Pandora-10.7B-v1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-12-16T15:32:12.267784(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 187, 67, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of jan-ai/Pandora-10.7B-v1\n\n\n\nDataset automatically created during the evaluation run of model jan-ai/Pandora-10.7B-v1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-12-16T15:32:12.267784(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]## Dataset Card Authors [optional]## Dataset Card Contact" ]
81010e7cccfeb3aad89a4cbc550817b67f55a7dc
# Dataset Card for Evaluation run of jan-ai/Solar-10.7B-SLERP <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [jan-ai/Solar-10.7B-SLERP](https://huggingface.co/jan-ai/Solar-10.7B-SLERP) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_jan-ai__Solar-10.7B-SLERP", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-12-16T15:35:26.592676](https://huggingface.co/datasets/open-llm-leaderboard/details_jan-ai__Solar-10.7B-SLERP/blob/main/results_2023-12-16T15-35-26.592676.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.6608479464480653, "acc_stderr": 0.031968087444665505, "acc_norm": 0.6623335219673708, "acc_norm_stderr": 0.03261535081063273, "mc1": 0.5079559363525091, "mc1_stderr": 0.01750128507455183, "mc2": 0.6571842191607326, "mc2_stderr": 0.015609617120580309 }, "harness|arc:challenge|25": { "acc": 0.6791808873720137, "acc_stderr": 0.013640943091946528, "acc_norm": 0.7073378839590444, "acc_norm_stderr": 0.013295916103619422 }, "harness|hellaswag|10": { "acc": 0.7035451105357499, "acc_stderr": 0.004557606227194303, "acc_norm": 0.8787094204341764, "acc_norm_stderr": 0.003257974593789937 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.42, "acc_stderr": 0.049604496374885836, "acc_norm": 0.42, "acc_norm_stderr": 0.049604496374885836 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.6296296296296297, "acc_stderr": 0.04171654161354543, "acc_norm": 0.6296296296296297, "acc_norm_stderr": 0.04171654161354543 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.6907894736842105, "acc_stderr": 0.037610708698674805, "acc_norm": 0.6907894736842105, "acc_norm_stderr": 0.037610708698674805 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.61, "acc_stderr": 0.04902071300001975, "acc_norm": 0.61, "acc_norm_stderr": 0.04902071300001975 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.7169811320754716, "acc_stderr": 0.027724236492700918, "acc_norm": 0.7169811320754716, "acc_norm_stderr": 0.027724236492700918 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.75, "acc_stderr": 0.03621034121889507, "acc_norm": 0.75, "acc_norm_stderr": 0.03621034121889507 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.45, "acc_stderr": 0.05, "acc_norm": 0.45, "acc_norm_stderr": 0.05 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.56, "acc_stderr": 0.04988876515698589, "acc_norm": 0.56, "acc_norm_stderr": 0.04988876515698589 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.33, "acc_stderr": 0.047258156262526045, "acc_norm": 
0.33, "acc_norm_stderr": 0.047258156262526045 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.6242774566473989, "acc_stderr": 0.036928207672648664, "acc_norm": 0.6242774566473989, "acc_norm_stderr": 0.036928207672648664 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.38235294117647056, "acc_stderr": 0.04835503696107223, "acc_norm": 0.38235294117647056, "acc_norm_stderr": 0.04835503696107223 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.77, "acc_stderr": 0.04229525846816507, "acc_norm": 0.77, "acc_norm_stderr": 0.04229525846816507 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.5914893617021276, "acc_stderr": 0.032134180267015755, "acc_norm": 0.5914893617021276, "acc_norm_stderr": 0.032134180267015755 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.5263157894736842, "acc_stderr": 0.046970851366478626, "acc_norm": 0.5263157894736842, "acc_norm_stderr": 0.046970851366478626 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5517241379310345, "acc_stderr": 0.04144311810878152, "acc_norm": 0.5517241379310345, "acc_norm_stderr": 0.04144311810878152 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.4894179894179894, "acc_stderr": 0.02574554227604548, "acc_norm": 0.4894179894179894, "acc_norm_stderr": 0.02574554227604548 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.42063492063492064, "acc_stderr": 0.04415438226743744, "acc_norm": 0.42063492063492064, "acc_norm_stderr": 0.04415438226743744 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.42, "acc_stderr": 0.049604496374885836, "acc_norm": 0.42, "acc_norm_stderr": 0.049604496374885836 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.7935483870967742, "acc_stderr": 0.023025899617188712, "acc_norm": 0.7935483870967742, "acc_norm_stderr": 0.023025899617188712 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.49261083743842365, "acc_stderr": 0.035176035403610084, "acc_norm": 0.49261083743842365, "acc_norm_stderr": 0.035176035403610084 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.71, "acc_stderr": 0.045604802157206845, "acc_norm": 0.71, "acc_norm_stderr": 0.045604802157206845 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.8242424242424242, "acc_stderr": 0.02972094300622445, "acc_norm": 0.8242424242424242, "acc_norm_stderr": 0.02972094300622445 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.8080808080808081, "acc_stderr": 0.02805779167298902, "acc_norm": 0.8080808080808081, "acc_norm_stderr": 0.02805779167298902 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.9015544041450777, "acc_stderr": 0.02150024957603344, "acc_norm": 0.9015544041450777, "acc_norm_stderr": 0.02150024957603344 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.6564102564102564, "acc_stderr": 0.024078696580635477, "acc_norm": 0.6564102564102564, "acc_norm_stderr": 0.024078696580635477 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.3592592592592593, "acc_stderr": 0.029252905927251976, "acc_norm": 0.3592592592592593, "acc_norm_stderr": 0.029252905927251976 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.7016806722689075, "acc_stderr": 0.029719142876342853, "acc_norm": 0.7016806722689075, "acc_norm_stderr": 0.029719142876342853 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.3576158940397351, "acc_stderr": 0.03913453431177258, "acc_norm": 0.3576158940397351, "acc_norm_stderr": 0.03913453431177258 }, 
"harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8385321100917431, "acc_stderr": 0.015776239256163248, "acc_norm": 0.8385321100917431, "acc_norm_stderr": 0.015776239256163248 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.5648148148148148, "acc_stderr": 0.033812000056435254, "acc_norm": 0.5648148148148148, "acc_norm_stderr": 0.033812000056435254 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.8284313725490197, "acc_stderr": 0.026460569561240647, "acc_norm": 0.8284313725490197, "acc_norm_stderr": 0.026460569561240647 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.8227848101265823, "acc_stderr": 0.024856364184503214, "acc_norm": 0.8227848101265823, "acc_norm_stderr": 0.024856364184503214 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.7085201793721974, "acc_stderr": 0.03050028317654585, "acc_norm": 0.7085201793721974, "acc_norm_stderr": 0.03050028317654585 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.7633587786259542, "acc_stderr": 0.03727673575596914, "acc_norm": 0.7633587786259542, "acc_norm_stderr": 0.03727673575596914 }, "harness|hendrycksTest-international_law|5": { "acc": 0.8264462809917356, "acc_stderr": 0.03457272836917671, "acc_norm": 0.8264462809917356, "acc_norm_stderr": 0.03457272836917671 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.8148148148148148, "acc_stderr": 0.03755265865037182, "acc_norm": 0.8148148148148148, "acc_norm_stderr": 0.03755265865037182 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.7607361963190185, "acc_stderr": 0.0335195387952127, "acc_norm": 0.7607361963190185, "acc_norm_stderr": 0.0335195387952127 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.49107142857142855, "acc_stderr": 0.04745033255489123, "acc_norm": 0.49107142857142855, "acc_norm_stderr": 0.04745033255489123 }, "harness|hendrycksTest-management|5": { "acc": 0.7864077669902912, "acc_stderr": 0.040580420156460344, "acc_norm": 0.7864077669902912, "acc_norm_stderr": 0.040580420156460344 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8803418803418803, "acc_stderr": 0.021262719400406964, "acc_norm": 0.8803418803418803, "acc_norm_stderr": 0.021262719400406964 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.75, "acc_stderr": 0.04351941398892446, "acc_norm": 0.75, "acc_norm_stderr": 0.04351941398892446 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.822477650063857, "acc_stderr": 0.01366423099583483, "acc_norm": 0.822477650063857, "acc_norm_stderr": 0.01366423099583483 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.7312138728323699, "acc_stderr": 0.023868003262500104, "acc_norm": 0.7312138728323699, "acc_norm_stderr": 0.023868003262500104 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.4770949720670391, "acc_stderr": 0.016704945740326188, "acc_norm": 0.4770949720670391, "acc_norm_stderr": 0.016704945740326188 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.7549019607843137, "acc_stderr": 0.024630048979824775, "acc_norm": 0.7549019607843137, "acc_norm_stderr": 0.024630048979824775 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.7395498392282959, "acc_stderr": 0.024926723224845543, "acc_norm": 0.7395498392282959, "acc_norm_stderr": 0.024926723224845543 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.75, "acc_stderr": 0.02409347123262133, "acc_norm": 0.75, "acc_norm_stderr": 0.02409347123262133 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.4929078014184397, "acc_stderr": 0.02982449855912901, "acc_norm": 0.4929078014184397, 
"acc_norm_stderr": 0.02982449855912901 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.48435462842242505, "acc_stderr": 0.012763982838120948, "acc_norm": 0.48435462842242505, "acc_norm_stderr": 0.012763982838120948 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.6985294117647058, "acc_stderr": 0.027875982114273168, "acc_norm": 0.6985294117647058, "acc_norm_stderr": 0.027875982114273168 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.6797385620915033, "acc_stderr": 0.018875682938069446, "acc_norm": 0.6797385620915033, "acc_norm_stderr": 0.018875682938069446 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6909090909090909, "acc_stderr": 0.044262946482000985, "acc_norm": 0.6909090909090909, "acc_norm_stderr": 0.044262946482000985 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.7387755102040816, "acc_stderr": 0.02812342933514278, "acc_norm": 0.7387755102040816, "acc_norm_stderr": 0.02812342933514278 }, "harness|hendrycksTest-sociology|5": { "acc": 0.845771144278607, "acc_stderr": 0.025538433368578327, "acc_norm": 0.845771144278607, "acc_norm_stderr": 0.025538433368578327 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.85, "acc_stderr": 0.0358870281282637, "acc_norm": 0.85, "acc_norm_stderr": 0.0358870281282637 }, "harness|hendrycksTest-virology|5": { "acc": 0.5301204819277109, "acc_stderr": 0.03885425420866767, "acc_norm": 0.5301204819277109, "acc_norm_stderr": 0.03885425420866767 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8304093567251462, "acc_stderr": 0.028782108105401705, "acc_norm": 0.8304093567251462, "acc_norm_stderr": 0.028782108105401705 }, "harness|truthfulqa:mc|0": { "mc1": 0.5079559363525091, "mc1_stderr": 0.01750128507455183, "mc2": 0.6571842191607326, "mc2_stderr": 0.015609617120580309 }, "harness|winogrande|5": { "acc": 0.824782951854775, "acc_stderr": 0.010684179227706163 }, "harness|gsm8k|5": { "acc": 0.6125852918877938, "acc_stderr": 0.013418798447827378 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). 
--> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
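The card above notes that the aggregated metrics live in an additional "results" configuration. As a minimal sketch of pulling those aggregates (the config and split names follow the configs metadata used by these details datasets; the exact column layout of the results parquet is not spelled out in the card, so the printout below is exploratory):

```python
from datasets import load_dataset

# The "results" configuration stores the aggregated metrics for each run;
# the "latest" split points at the most recent results parquet.
results = load_dataset(
    "open-llm-leaderboard/details_jan-ai__Solar-10.7B-SLERP",
    "results",
    split="latest",
)

# Inspect what the aggregated-results row contains.
print(results.column_names)
print(results[0])
```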
open-llm-leaderboard/details_jan-ai__Solar-10.7B-SLERP
[ "region:us" ]
2023-12-16T15:38:22+00:00
{"pretty_name": "Evaluation run of jan-ai/Solar-10.7B-SLERP", "dataset_summary": "Dataset automatically created during the evaluation run of model [jan-ai/Solar-10.7B-SLERP](https://huggingface.co/jan-ai/Solar-10.7B-SLERP) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_jan-ai__Solar-10.7B-SLERP\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-12-16T15:35:26.592676](https://huggingface.co/datasets/open-llm-leaderboard/details_jan-ai__Solar-10.7B-SLERP/blob/main/results_2023-12-16T15-35-26.592676.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6608479464480653,\n \"acc_stderr\": 0.031968087444665505,\n \"acc_norm\": 0.6623335219673708,\n \"acc_norm_stderr\": 0.03261535081063273,\n \"mc1\": 0.5079559363525091,\n \"mc1_stderr\": 0.01750128507455183,\n \"mc2\": 0.6571842191607326,\n \"mc2_stderr\": 0.015609617120580309\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.6791808873720137,\n \"acc_stderr\": 0.013640943091946528,\n \"acc_norm\": 0.7073378839590444,\n \"acc_norm_stderr\": 0.013295916103619422\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.7035451105357499,\n \"acc_stderr\": 0.004557606227194303,\n \"acc_norm\": 0.8787094204341764,\n \"acc_norm_stderr\": 0.003257974593789937\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.42,\n \"acc_stderr\": 0.049604496374885836,\n \"acc_norm\": 0.42,\n \"acc_norm_stderr\": 0.049604496374885836\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6296296296296297,\n \"acc_stderr\": 0.04171654161354543,\n \"acc_norm\": 0.6296296296296297,\n \"acc_norm_stderr\": 0.04171654161354543\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.6907894736842105,\n \"acc_stderr\": 0.037610708698674805,\n \"acc_norm\": 0.6907894736842105,\n \"acc_norm_stderr\": 0.037610708698674805\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.61,\n \"acc_stderr\": 0.04902071300001975,\n \"acc_norm\": 0.61,\n \"acc_norm_stderr\": 0.04902071300001975\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.7169811320754716,\n \"acc_stderr\": 0.027724236492700918,\n \"acc_norm\": 0.7169811320754716,\n \"acc_norm_stderr\": 0.027724236492700918\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.75,\n \"acc_stderr\": 0.03621034121889507,\n \"acc_norm\": 0.75,\n \"acc_norm_stderr\": 0.03621034121889507\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.45,\n \"acc_stderr\": 0.05,\n \"acc_norm\": 0.45,\n \"acc_norm_stderr\": 0.05\n },\n 
\"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.56,\n \"acc_stderr\": 0.04988876515698589,\n \"acc_norm\": 0.56,\n \"acc_norm_stderr\": 0.04988876515698589\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.33,\n \"acc_stderr\": 0.047258156262526045,\n \"acc_norm\": 0.33,\n \"acc_norm_stderr\": 0.047258156262526045\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6242774566473989,\n \"acc_stderr\": 0.036928207672648664,\n \"acc_norm\": 0.6242774566473989,\n \"acc_norm_stderr\": 0.036928207672648664\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.38235294117647056,\n \"acc_stderr\": 0.04835503696107223,\n \"acc_norm\": 0.38235294117647056,\n \"acc_norm_stderr\": 0.04835503696107223\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.77,\n \"acc_stderr\": 0.04229525846816507,\n \"acc_norm\": 0.77,\n \"acc_norm_stderr\": 0.04229525846816507\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.5914893617021276,\n \"acc_stderr\": 0.032134180267015755,\n \"acc_norm\": 0.5914893617021276,\n \"acc_norm_stderr\": 0.032134180267015755\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.5263157894736842,\n \"acc_stderr\": 0.046970851366478626,\n \"acc_norm\": 0.5263157894736842,\n \"acc_norm_stderr\": 0.046970851366478626\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5517241379310345,\n \"acc_stderr\": 0.04144311810878152,\n \"acc_norm\": 0.5517241379310345,\n \"acc_norm_stderr\": 0.04144311810878152\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.4894179894179894,\n \"acc_stderr\": 0.02574554227604548,\n \"acc_norm\": 0.4894179894179894,\n \"acc_norm_stderr\": 0.02574554227604548\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.42063492063492064,\n \"acc_stderr\": 0.04415438226743744,\n \"acc_norm\": 0.42063492063492064,\n \"acc_norm_stderr\": 0.04415438226743744\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.42,\n \"acc_stderr\": 0.049604496374885836,\n \"acc_norm\": 0.42,\n \"acc_norm_stderr\": 0.049604496374885836\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7935483870967742,\n \"acc_stderr\": 0.023025899617188712,\n \"acc_norm\": 0.7935483870967742,\n \"acc_norm_stderr\": 0.023025899617188712\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.49261083743842365,\n \"acc_stderr\": 0.035176035403610084,\n \"acc_norm\": 0.49261083743842365,\n \"acc_norm_stderr\": 0.035176035403610084\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.71,\n \"acc_stderr\": 0.045604802157206845,\n \"acc_norm\": 0.71,\n \"acc_norm_stderr\": 0.045604802157206845\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.8242424242424242,\n \"acc_stderr\": 0.02972094300622445,\n \"acc_norm\": 0.8242424242424242,\n \"acc_norm_stderr\": 0.02972094300622445\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.8080808080808081,\n \"acc_stderr\": 0.02805779167298902,\n \"acc_norm\": 0.8080808080808081,\n \"acc_norm_stderr\": 0.02805779167298902\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.9015544041450777,\n \"acc_stderr\": 0.02150024957603344,\n \"acc_norm\": 0.9015544041450777,\n \"acc_norm_stderr\": 0.02150024957603344\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.6564102564102564,\n \"acc_stderr\": 0.024078696580635477,\n 
\"acc_norm\": 0.6564102564102564,\n \"acc_norm_stderr\": 0.024078696580635477\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.3592592592592593,\n \"acc_stderr\": 0.029252905927251976,\n \"acc_norm\": 0.3592592592592593,\n \"acc_norm_stderr\": 0.029252905927251976\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.7016806722689075,\n \"acc_stderr\": 0.029719142876342853,\n \"acc_norm\": 0.7016806722689075,\n \"acc_norm_stderr\": 0.029719142876342853\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.3576158940397351,\n \"acc_stderr\": 0.03913453431177258,\n \"acc_norm\": 0.3576158940397351,\n \"acc_norm_stderr\": 0.03913453431177258\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8385321100917431,\n \"acc_stderr\": 0.015776239256163248,\n \"acc_norm\": 0.8385321100917431,\n \"acc_norm_stderr\": 0.015776239256163248\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.5648148148148148,\n \"acc_stderr\": 0.033812000056435254,\n \"acc_norm\": 0.5648148148148148,\n \"acc_norm_stderr\": 0.033812000056435254\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.8284313725490197,\n \"acc_stderr\": 0.026460569561240647,\n \"acc_norm\": 0.8284313725490197,\n \"acc_norm_stderr\": 0.026460569561240647\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.8227848101265823,\n \"acc_stderr\": 0.024856364184503214,\n \"acc_norm\": 0.8227848101265823,\n \"acc_norm_stderr\": 0.024856364184503214\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.7085201793721974,\n \"acc_stderr\": 0.03050028317654585,\n \"acc_norm\": 0.7085201793721974,\n \"acc_norm_stderr\": 0.03050028317654585\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.7633587786259542,\n \"acc_stderr\": 0.03727673575596914,\n \"acc_norm\": 0.7633587786259542,\n \"acc_norm_stderr\": 0.03727673575596914\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.8264462809917356,\n \"acc_stderr\": 0.03457272836917671,\n \"acc_norm\": 0.8264462809917356,\n \"acc_norm_stderr\": 0.03457272836917671\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.8148148148148148,\n \"acc_stderr\": 0.03755265865037182,\n \"acc_norm\": 0.8148148148148148,\n \"acc_norm_stderr\": 0.03755265865037182\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7607361963190185,\n \"acc_stderr\": 0.0335195387952127,\n \"acc_norm\": 0.7607361963190185,\n \"acc_norm_stderr\": 0.0335195387952127\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.49107142857142855,\n \"acc_stderr\": 0.04745033255489123,\n \"acc_norm\": 0.49107142857142855,\n \"acc_norm_stderr\": 0.04745033255489123\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7864077669902912,\n \"acc_stderr\": 0.040580420156460344,\n \"acc_norm\": 0.7864077669902912,\n \"acc_norm_stderr\": 0.040580420156460344\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8803418803418803,\n \"acc_stderr\": 0.021262719400406964,\n \"acc_norm\": 0.8803418803418803,\n \"acc_norm_stderr\": 0.021262719400406964\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.75,\n \"acc_stderr\": 0.04351941398892446,\n \"acc_norm\": 0.75,\n \"acc_norm_stderr\": 0.04351941398892446\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.822477650063857,\n \"acc_stderr\": 0.01366423099583483,\n \"acc_norm\": 0.822477650063857,\n \"acc_norm_stderr\": 
0.01366423099583483\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.7312138728323699,\n \"acc_stderr\": 0.023868003262500104,\n \"acc_norm\": 0.7312138728323699,\n \"acc_norm_stderr\": 0.023868003262500104\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.4770949720670391,\n \"acc_stderr\": 0.016704945740326188,\n \"acc_norm\": 0.4770949720670391,\n \"acc_norm_stderr\": 0.016704945740326188\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.7549019607843137,\n \"acc_stderr\": 0.024630048979824775,\n \"acc_norm\": 0.7549019607843137,\n \"acc_norm_stderr\": 0.024630048979824775\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.7395498392282959,\n \"acc_stderr\": 0.024926723224845543,\n \"acc_norm\": 0.7395498392282959,\n \"acc_norm_stderr\": 0.024926723224845543\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.75,\n \"acc_stderr\": 0.02409347123262133,\n \"acc_norm\": 0.75,\n \"acc_norm_stderr\": 0.02409347123262133\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.4929078014184397,\n \"acc_stderr\": 0.02982449855912901,\n \"acc_norm\": 0.4929078014184397,\n \"acc_norm_stderr\": 0.02982449855912901\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.48435462842242505,\n \"acc_stderr\": 0.012763982838120948,\n \"acc_norm\": 0.48435462842242505,\n \"acc_norm_stderr\": 0.012763982838120948\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.6985294117647058,\n \"acc_stderr\": 0.027875982114273168,\n \"acc_norm\": 0.6985294117647058,\n \"acc_norm_stderr\": 0.027875982114273168\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6797385620915033,\n \"acc_stderr\": 0.018875682938069446,\n \"acc_norm\": 0.6797385620915033,\n \"acc_norm_stderr\": 0.018875682938069446\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6909090909090909,\n \"acc_stderr\": 0.044262946482000985,\n \"acc_norm\": 0.6909090909090909,\n \"acc_norm_stderr\": 0.044262946482000985\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.7387755102040816,\n \"acc_stderr\": 0.02812342933514278,\n \"acc_norm\": 0.7387755102040816,\n \"acc_norm_stderr\": 0.02812342933514278\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.845771144278607,\n \"acc_stderr\": 0.025538433368578327,\n \"acc_norm\": 0.845771144278607,\n \"acc_norm_stderr\": 0.025538433368578327\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.85,\n \"acc_stderr\": 0.0358870281282637,\n \"acc_norm\": 0.85,\n \"acc_norm_stderr\": 0.0358870281282637\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5301204819277109,\n \"acc_stderr\": 0.03885425420866767,\n \"acc_norm\": 0.5301204819277109,\n \"acc_norm_stderr\": 0.03885425420866767\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8304093567251462,\n \"acc_stderr\": 0.028782108105401705,\n \"acc_norm\": 0.8304093567251462,\n \"acc_norm_stderr\": 0.028782108105401705\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.5079559363525091,\n \"mc1_stderr\": 0.01750128507455183,\n \"mc2\": 0.6571842191607326,\n \"mc2_stderr\": 0.015609617120580309\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.824782951854775,\n \"acc_stderr\": 0.010684179227706163\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.6125852918877938,\n \"acc_stderr\": 0.013418798447827378\n }\n}\n```", "repo_url": "https://huggingface.co/jan-ai/Solar-10.7B-SLERP", "leaderboard_url": 
"https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_12_16T15_35_26.592676", "path": ["**/details_harness|arc:challenge|25_2023-12-16T15-35-26.592676.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-12-16T15-35-26.592676.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_12_16T15_35_26.592676", "path": ["**/details_harness|gsm8k|5_2023-12-16T15-35-26.592676.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-12-16T15-35-26.592676.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_12_16T15_35_26.592676", "path": ["**/details_harness|hellaswag|10_2023-12-16T15-35-26.592676.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-12-16T15-35-26.592676.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_12_16T15_35_26.592676", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T15-35-26.592676.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-12-16T15-35-26.592676.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-12-16T15-35-26.592676.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T15-35-26.592676.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T15-35-26.592676.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-12-16T15-35-26.592676.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T15-35-26.592676.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T15-35-26.592676.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T15-35-26.592676.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T15-35-26.592676.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-12-16T15-35-26.592676.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-12-16T15-35-26.592676.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T15-35-26.592676.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-12-16T15-35-26.592676.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T15-35-26.592676.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T15-35-26.592676.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T15-35-26.592676.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-12-16T15-35-26.592676.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T15-35-26.592676.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T15-35-26.592676.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T15-35-26.592676.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T15-35-26.592676.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T15-35-26.592676.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T15-35-26.592676.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T15-35-26.592676.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T15-35-26.592676.parquet", 
"**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T15-35-26.592676.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T15-35-26.592676.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T15-35-26.592676.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T15-35-26.592676.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T15-35-26.592676.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T15-35-26.592676.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-12-16T15-35-26.592676.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T15-35-26.592676.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-12-16T15-35-26.592676.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T15-35-26.592676.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T15-35-26.592676.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T15-35-26.592676.parquet", "**/details_harness|hendrycksTest-management|5_2023-12-16T15-35-26.592676.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-12-16T15-35-26.592676.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T15-35-26.592676.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T15-35-26.592676.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T15-35-26.592676.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T15-35-26.592676.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-12-16T15-35-26.592676.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-12-16T15-35-26.592676.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-12-16T15-35-26.592676.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T15-35-26.592676.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-12-16T15-35-26.592676.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T15-35-26.592676.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T15-35-26.592676.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-12-16T15-35-26.592676.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-12-16T15-35-26.592676.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-12-16T15-35-26.592676.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T15-35-26.592676.parquet", "**/details_harness|hendrycksTest-virology|5_2023-12-16T15-35-26.592676.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-12-16T15-35-26.592676.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T15-35-26.592676.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-12-16T15-35-26.592676.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-12-16T15-35-26.592676.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T15-35-26.592676.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T15-35-26.592676.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-12-16T15-35-26.592676.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T15-35-26.592676.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T15-35-26.592676.parquet", 
"**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T15-35-26.592676.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T15-35-26.592676.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-12-16T15-35-26.592676.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-12-16T15-35-26.592676.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T15-35-26.592676.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-12-16T15-35-26.592676.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T15-35-26.592676.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T15-35-26.592676.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T15-35-26.592676.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-12-16T15-35-26.592676.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T15-35-26.592676.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T15-35-26.592676.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T15-35-26.592676.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T15-35-26.592676.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T15-35-26.592676.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T15-35-26.592676.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T15-35-26.592676.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T15-35-26.592676.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T15-35-26.592676.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T15-35-26.592676.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T15-35-26.592676.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T15-35-26.592676.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T15-35-26.592676.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T15-35-26.592676.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-12-16T15-35-26.592676.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T15-35-26.592676.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-12-16T15-35-26.592676.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T15-35-26.592676.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T15-35-26.592676.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T15-35-26.592676.parquet", "**/details_harness|hendrycksTest-management|5_2023-12-16T15-35-26.592676.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-12-16T15-35-26.592676.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T15-35-26.592676.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T15-35-26.592676.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T15-35-26.592676.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T15-35-26.592676.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-12-16T15-35-26.592676.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-12-16T15-35-26.592676.parquet", 
"**/details_harness|hendrycksTest-prehistory|5_2023-12-16T15-35-26.592676.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T15-35-26.592676.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-12-16T15-35-26.592676.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T15-35-26.592676.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T15-35-26.592676.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-12-16T15-35-26.592676.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-12-16T15-35-26.592676.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-12-16T15-35-26.592676.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T15-35-26.592676.parquet", "**/details_harness|hendrycksTest-virology|5_2023-12-16T15-35-26.592676.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-12-16T15-35-26.592676.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_12_16T15_35_26.592676", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T15-35-26.592676.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T15-35-26.592676.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_12_16T15_35_26.592676", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-12-16T15-35-26.592676.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-12-16T15-35-26.592676.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_12_16T15_35_26.592676", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-12-16T15-35-26.592676.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-12-16T15-35-26.592676.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_12_16T15_35_26.592676", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T15-35-26.592676.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T15-35-26.592676.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_12_16T15_35_26.592676", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T15-35-26.592676.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T15-35-26.592676.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_12_16T15_35_26.592676", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-12-16T15-35-26.592676.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-12-16T15-35-26.592676.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_12_16T15_35_26.592676", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T15-35-26.592676.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T15-35-26.592676.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_12_16T15_35_26.592676", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T15-35-26.592676.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T15-35-26.592676.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_12_16T15_35_26.592676", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T15-35-26.592676.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T15-35-26.592676.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_12_16T15_35_26.592676", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T15-35-26.592676.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T15-35-26.592676.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_12_16T15_35_26.592676", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-12-16T15-35-26.592676.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-12-16T15-35-26.592676.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_12_16T15_35_26.592676", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-12-16T15-35-26.592676.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-12-16T15-35-26.592676.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_12_16T15_35_26.592676", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T15-35-26.592676.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T15-35-26.592676.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_12_16T15_35_26.592676", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-12-16T15-35-26.592676.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-12-16T15-35-26.592676.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_12_16T15_35_26.592676", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T15-35-26.592676.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T15-35-26.592676.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_12_16T15_35_26.592676", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T15-35-26.592676.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T15-35-26.592676.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_12_16T15_35_26.592676", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T15-35-26.592676.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T15-35-26.592676.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_12_16T15_35_26.592676", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-12-16T15-35-26.592676.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-12-16T15-35-26.592676.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_12_16T15_35_26.592676", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T15-35-26.592676.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T15-35-26.592676.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_12_16T15_35_26.592676", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T15-35-26.592676.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T15-35-26.592676.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_12_16T15_35_26.592676", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T15-35-26.592676.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T15-35-26.592676.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_12_16T15_35_26.592676", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T15-35-26.592676.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T15-35-26.592676.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_12_16T15_35_26.592676", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T15-35-26.592676.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T15-35-26.592676.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_12_16T15_35_26.592676", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T15-35-26.592676.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T15-35-26.592676.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_12_16T15_35_26.592676", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T15-35-26.592676.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T15-35-26.592676.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_12_16T15_35_26.592676", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T15-35-26.592676.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T15-35-26.592676.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_12_16T15_35_26.592676", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T15-35-26.592676.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T15-35-26.592676.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_12_16T15_35_26.592676", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T15-35-26.592676.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T15-35-26.592676.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_12_16T15_35_26.592676", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T15-35-26.592676.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T15-35-26.592676.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_12_16T15_35_26.592676", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T15-35-26.592676.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T15-35-26.592676.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_12_16T15_35_26.592676", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T15-35-26.592676.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T15-35-26.592676.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_12_16T15_35_26.592676", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T15-35-26.592676.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T15-35-26.592676.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_12_16T15_35_26.592676", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-12-16T15-35-26.592676.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-12-16T15-35-26.592676.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_12_16T15_35_26.592676", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T15-35-26.592676.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T15-35-26.592676.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_12_16T15_35_26.592676", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-12-16T15-35-26.592676.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-12-16T15-35-26.592676.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_12_16T15_35_26.592676", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T15-35-26.592676.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T15-35-26.592676.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_12_16T15_35_26.592676", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T15-35-26.592676.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T15-35-26.592676.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_12_16T15_35_26.592676", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T15-35-26.592676.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T15-35-26.592676.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_12_16T15_35_26.592676", "path": ["**/details_harness|hendrycksTest-management|5_2023-12-16T15-35-26.592676.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-12-16T15-35-26.592676.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_12_16T15_35_26.592676", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-12-16T15-35-26.592676.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-12-16T15-35-26.592676.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_12_16T15_35_26.592676", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T15-35-26.592676.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T15-35-26.592676.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_12_16T15_35_26.592676", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T15-35-26.592676.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T15-35-26.592676.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_12_16T15_35_26.592676", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T15-35-26.592676.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T15-35-26.592676.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_12_16T15_35_26.592676", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T15-35-26.592676.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T15-35-26.592676.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_12_16T15_35_26.592676", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-12-16T15-35-26.592676.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-12-16T15-35-26.592676.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_12_16T15_35_26.592676", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-12-16T15-35-26.592676.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-12-16T15-35-26.592676.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_12_16T15_35_26.592676", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-12-16T15-35-26.592676.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-12-16T15-35-26.592676.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_12_16T15_35_26.592676", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T15-35-26.592676.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T15-35-26.592676.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_12_16T15_35_26.592676", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-12-16T15-35-26.592676.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2023-12-16T15-35-26.592676.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_12_16T15_35_26.592676", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T15-35-26.592676.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T15-35-26.592676.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_12_16T15_35_26.592676", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T15-35-26.592676.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T15-35-26.592676.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_12_16T15_35_26.592676", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-12-16T15-35-26.592676.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-12-16T15-35-26.592676.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_12_16T15_35_26.592676", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-12-16T15-35-26.592676.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-12-16T15-35-26.592676.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_12_16T15_35_26.592676", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-12-16T15-35-26.592676.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-12-16T15-35-26.592676.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_12_16T15_35_26.592676", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T15-35-26.592676.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T15-35-26.592676.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_12_16T15_35_26.592676", "path": ["**/details_harness|hendrycksTest-virology|5_2023-12-16T15-35-26.592676.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-12-16T15-35-26.592676.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_12_16T15_35_26.592676", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-12-16T15-35-26.592676.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-12-16T15-35-26.592676.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_12_16T15_35_26.592676", "path": ["**/details_harness|truthfulqa:mc|0_2023-12-16T15-35-26.592676.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-12-16T15-35-26.592676.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_12_16T15_35_26.592676", "path": ["**/details_harness|winogrande|5_2023-12-16T15-35-26.592676.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-12-16T15-35-26.592676.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_12_16T15_35_26.592676", "path": ["results_2023-12-16T15-35-26.592676.parquet"]}, {"split": "latest", "path": 
["results_2023-12-16T15-35-26.592676.parquet"]}]}]}
2023-12-16T15:39:03+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of jan-ai/Solar-10.7B-SLERP Dataset automatically created during the evaluation run of model jan-ai/Solar-10.7B-SLERP on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-12-16T15:35:26.592676 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
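The card text above ends its loading instructions at "you can for instance do the following:" without showing the snippet itself. A minimal sketch is given below; the `harness_winogrande_5` config name appears in this record's config list, while the repository id is an assumption based on the `details_<org>__<model>` naming pattern used by the other evaluation datasets in this dump:

```python
from datasets import load_dataset

# Assumption: the repository id follows the details_<org>__<model> pattern
# used by the other Open LLM Leaderboard detail datasets in this dump.
data = load_dataset(
    "open-llm-leaderboard/details_jan-ai__Solar-10.7B-SLERP",
    "harness_winogrande_5",  # one of the 63 per-task configurations
    split="train",           # "train" always points to the latest results
)
print(data)
```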
[ "# Dataset Card for Evaluation run of jan-ai/Solar-10.7B-SLERP\n\n\n\nDataset automatically created during the evaluation run of model jan-ai/Solar-10.7B-SLERP on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-12-16T15:35:26.592676(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of jan-ai/Solar-10.7B-SLERP\n\n\n\nDataset automatically created during the evaluation run of model jan-ai/Solar-10.7B-SLERP on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-12-16T15:35:26.592676(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 189, 66, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of jan-ai/Solar-10.7B-SLERP\n\n\n\nDataset automatically created during the evaluation run of model jan-ai/Solar-10.7B-SLERP on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-12-16T15:35:26.592676(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]## Dataset Card Authors [optional]## Dataset Card Contact" ]
ffaf49ee7439afe6dbf0653714ceacb9b57ff830
# Кулинарные рецепты с сайта [povarenok.ru](https://www.povarenok.ru) Данные актуальны на 2021-06-16. Парсер, с помощью которого получили датасет, можно найти в [этом репозитории](https://github.com/rogozinushka/povarenok_recipes_parser) Внимание. Согласно [правилам размещения рецептов](https://www.povarenok.ru/wiki/pravilorecept), все права на рецепты принадлежат сайту, так что имейте это в виду, если планируете использовать датасет Датафрейм имеет такую структуру: - url - ссылка на рецепт - name - название рецепта - ingredients - словарь с ингредиентами. Ключ - ингредиент, значение - количество |url|name|ingredients| |---|---|---| |https://www.povarenok.ru/recipes/show/171921/|Омлет с сыром и ветчиной|{'Яйцо куриное': '5 шт', 'Ветчина': '150 г', 'Сыр твердый': '150 г', 'Соль': '1 щепот.', 'Масло сливочное': '10 г', 'Молоко': '50 мл'}| # Culinary recipes from [povarenok.ru](https://www.povarenok.ru) site The data is current as of 2021-06-16. The parser used to collect the dataset can be found in [this repository](https://github.com/rogozinushka/povarenok_recipes_parser) Note: according to the [recipe posting rules](https://www.povarenok.ru/wiki/pravilorecept), all rights to the recipes belong to the site, so keep this in mind if you plan to use the dataset. Data structure: - url - culinary recipe url - name - culinary recipe name - ingredients - ingredients dict. Key is ingredient, value is amount |url|name|ingredients| |---|---|---| |https://www.povarenok.ru/recipes/show/171921/|Омлет с сыром и ветчиной|{'Яйцо куриное': '5 шт', 'Ветчина': '150 г', 'Сыр твердый': '150 г', 'Соль': '1 щепот.', 'Масло сливочное': '10 г', 'Молоко': '50 мл'}|
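As a quick illustration of the structure described above, here is a hedged sketch of loading the dataset with the `datasets` library and reading the `ingredients` dict; the repository id `rogozinushka/povarenok-recipes` is the one this record is published under, but the split name and whether `ingredients` arrives as a dict or as a serialized string are assumptions about how the files were uploaded:

```python
import ast
from datasets import load_dataset

# Assumption: the recipe files are in a format `datasets` can auto-load
# (e.g. CSV or Parquet) and land in a default "train" split.
recipes = load_dataset("rogozinushka/povarenok-recipes", split="train")

example = recipes[0]
print(example["url"], example["name"])

# ingredients maps ingredient -> amount; parse it back into a dict if the
# column was stored as a string representation.
ingredients = example["ingredients"]
if isinstance(ingredients, str):
    ingredients = ast.literal_eval(ingredients)

for ingredient, amount in ingredients.items():
    print(f"{ingredient}: {amount}")
```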
rogozinushka/povarenok-recipes
[ "language:ru", "region:us" ]
2023-12-16T15:41:30+00:00
{"language": ["ru"]}
2023-12-16T15:43:22+00:00
[]
[ "ru" ]
TAGS #language-Russian #region-us
Кулинарные рецепты с сайта URL ============================== Данные актуальны на 2021-06-16. Парсер, с помощью которого получили датасет, можно найти в этом репозитории Внимание. Согласно правилам размещения рецептов, все права на рецепты принадлежат сайту, так что имейте это в виду, если планируете использовать датасет Датафрейм имеет такую структуру: * url - ссылка на рецепт * name - название рецепта * ingredients - словарь с ингредиентами. Ключ - ингредиент, значение - количество url: URL, name: Омлет с сыром и ветчиной, ingredients: {'Яйцо куриное': '5 шт', 'Ветчина': '150 г', 'Сыр твердый': '150 г', 'Соль': '1 щепот.', 'Масло сливочное': '10 г', 'Молоко': '50 мл'} Culinary recipes from URL site ============================== The data is current as of 2021-06-16. The parser used to collect the dataset can be found in [this repository](URL) Data structure: * url - culinary recipe url * name - culinary recipe name * ingredients - ingredients dict. Key is ingredient, value is amount url: URL, name: Омлет с сыром и ветчиной, ingredients: {'Яйцо куриное': '5 шт', 'Ветчина': '150 г', 'Сыр твердый': '150 г', 'Соль': '1 щепот.', 'Масло сливочное': '10 г', 'Молоко': '50 мл'}
[]
[ "TAGS\n#language-Russian #region-us \n" ]
[ 11 ]
[ "passage: TAGS\n#language-Russian #region-us \n" ]
1358c89d598a794e527c005a62d3ff58f1f89022
# Dataset Card for Evaluation run of mncai/agiin-13.6B-v0.0 <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [mncai/agiin-13.6B-v0.0](https://huggingface.co/mncai/agiin-13.6B-v0.0) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_mncai__agiin-13.6B-v0.0", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-12-16T15:55:21.950393](https://huggingface.co/datasets/open-llm-leaderboard/details_mncai__agiin-13.6B-v0.0/blob/main/results_2023-12-16T15-55-21.950393.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.621527215806331, "acc_stderr": 0.03309044810009566, "acc_norm": 0.6248205476117454, "acc_norm_stderr": 0.03375647243509085, "mc1": 0.5165238678090576, "mc1_stderr": 0.017493940190057723, "mc2": 0.6740086972319943, "mc2_stderr": 0.015471222805293889 }, "harness|arc:challenge|25": { "acc": 0.659556313993174, "acc_stderr": 0.013847460518892973, "acc_norm": 0.6945392491467577, "acc_norm_stderr": 0.013460080478002508 }, "harness|hellaswag|10": { "acc": 0.6858195578570006, "acc_stderr": 0.0046323996774908106, "acc_norm": 0.8658633738299144, "acc_norm_stderr": 0.0034010255178737237 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.35, "acc_stderr": 0.047937248544110196, "acc_norm": 0.35, "acc_norm_stderr": 0.047937248544110196 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.562962962962963, "acc_stderr": 0.042849586397534015, "acc_norm": 0.562962962962963, "acc_norm_stderr": 0.042849586397534015 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.625, "acc_stderr": 0.039397364351956274, "acc_norm": 0.625, "acc_norm_stderr": 0.039397364351956274 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.57, "acc_stderr": 0.049756985195624284, "acc_norm": 0.57, "acc_norm_stderr": 0.049756985195624284 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.6339622641509434, "acc_stderr": 0.029647813539365245, "acc_norm": 0.6339622641509434, "acc_norm_stderr": 0.029647813539365245 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.6944444444444444, "acc_stderr": 0.03852084696008534, "acc_norm": 0.6944444444444444, "acc_norm_stderr": 0.03852084696008534 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.44, "acc_stderr": 0.04988876515698589, "acc_norm": 0.44, "acc_norm_stderr": 0.04988876515698589 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.51, "acc_stderr": 0.05024183937956911, "acc_norm": 0.51, "acc_norm_stderr": 0.05024183937956911 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.32, "acc_stderr": 
0.04688261722621504, "acc_norm": 0.32, "acc_norm_stderr": 0.04688261722621504 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.6184971098265896, "acc_stderr": 0.037038511930995215, "acc_norm": 0.6184971098265896, "acc_norm_stderr": 0.037038511930995215 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.4117647058823529, "acc_stderr": 0.048971049527263666, "acc_norm": 0.4117647058823529, "acc_norm_stderr": 0.048971049527263666 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.68, "acc_stderr": 0.04688261722621505, "acc_norm": 0.68, "acc_norm_stderr": 0.04688261722621505 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.5531914893617021, "acc_stderr": 0.032500536843658404, "acc_norm": 0.5531914893617021, "acc_norm_stderr": 0.032500536843658404 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.4473684210526316, "acc_stderr": 0.04677473004491199, "acc_norm": 0.4473684210526316, "acc_norm_stderr": 0.04677473004491199 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.6068965517241379, "acc_stderr": 0.040703290137070705, "acc_norm": 0.6068965517241379, "acc_norm_stderr": 0.040703290137070705 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.41798941798941797, "acc_stderr": 0.025402555503260912, "acc_norm": 0.41798941798941797, "acc_norm_stderr": 0.025402555503260912 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.42063492063492064, "acc_stderr": 0.04415438226743744, "acc_norm": 0.42063492063492064, "acc_norm_stderr": 0.04415438226743744 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.37, "acc_stderr": 0.048523658709391, "acc_norm": 0.37, "acc_norm_stderr": 0.048523658709391 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.7354838709677419, "acc_stderr": 0.02509189237885928, "acc_norm": 0.7354838709677419, "acc_norm_stderr": 0.02509189237885928 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.4876847290640394, "acc_stderr": 0.035169204442208966, "acc_norm": 0.4876847290640394, "acc_norm_stderr": 0.035169204442208966 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.64, "acc_stderr": 0.04824181513244218, "acc_norm": 0.64, "acc_norm_stderr": 0.04824181513244218 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.7878787878787878, "acc_stderr": 0.03192271569548301, "acc_norm": 0.7878787878787878, "acc_norm_stderr": 0.03192271569548301 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.7676767676767676, "acc_stderr": 0.030088629490217487, "acc_norm": 0.7676767676767676, "acc_norm_stderr": 0.030088629490217487 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.8393782383419689, "acc_stderr": 0.026499057701397443, "acc_norm": 0.8393782383419689, "acc_norm_stderr": 0.026499057701397443 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.6512820512820513, "acc_stderr": 0.02416278028401772, "acc_norm": 0.6512820512820513, "acc_norm_stderr": 0.02416278028401772 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.35185185185185186, "acc_stderr": 0.02911661760608301, "acc_norm": 0.35185185185185186, "acc_norm_stderr": 0.02911661760608301 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.6386554621848739, "acc_stderr": 0.03120469122515001, "acc_norm": 0.6386554621848739, "acc_norm_stderr": 0.03120469122515001 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.3443708609271523, "acc_stderr": 0.03879687024073327, "acc_norm": 0.3443708609271523, "acc_norm_stderr": 
0.03879687024073327 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8201834862385321, "acc_stderr": 0.01646534546739152, "acc_norm": 0.8201834862385321, "acc_norm_stderr": 0.01646534546739152 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.5787037037037037, "acc_stderr": 0.033674621388960775, "acc_norm": 0.5787037037037037, "acc_norm_stderr": 0.033674621388960775 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.8088235294117647, "acc_stderr": 0.027599174300640766, "acc_norm": 0.8088235294117647, "acc_norm_stderr": 0.027599174300640766 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.7890295358649789, "acc_stderr": 0.02655837250266192, "acc_norm": 0.7890295358649789, "acc_norm_stderr": 0.02655837250266192 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.6816143497757847, "acc_stderr": 0.03126580522513713, "acc_norm": 0.6816143497757847, "acc_norm_stderr": 0.03126580522513713 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.7022900763358778, "acc_stderr": 0.040103589424622034, "acc_norm": 0.7022900763358778, "acc_norm_stderr": 0.040103589424622034 }, "harness|hendrycksTest-international_law|5": { "acc": 0.8099173553719008, "acc_stderr": 0.03581796951709282, "acc_norm": 0.8099173553719008, "acc_norm_stderr": 0.03581796951709282 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.75, "acc_stderr": 0.04186091791394607, "acc_norm": 0.75, "acc_norm_stderr": 0.04186091791394607 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.7423312883435583, "acc_stderr": 0.03436150827846917, "acc_norm": 0.7423312883435583, "acc_norm_stderr": 0.03436150827846917 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.5, "acc_stderr": 0.04745789978762494, "acc_norm": 0.5, "acc_norm_stderr": 0.04745789978762494 }, "harness|hendrycksTest-management|5": { "acc": 0.7572815533980582, "acc_stderr": 0.04245022486384495, "acc_norm": 0.7572815533980582, "acc_norm_stderr": 0.04245022486384495 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8418803418803419, "acc_stderr": 0.023902325549560417, "acc_norm": 0.8418803418803419, "acc_norm_stderr": 0.023902325549560417 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.64, "acc_stderr": 0.04824181513244218, "acc_norm": 0.64, "acc_norm_stderr": 0.04824181513244218 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.7484035759897829, "acc_stderr": 0.015517322365529633, "acc_norm": 0.7484035759897829, "acc_norm_stderr": 0.015517322365529633 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.6878612716763006, "acc_stderr": 0.024946792225272314, "acc_norm": 0.6878612716763006, "acc_norm_stderr": 0.024946792225272314 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.4692737430167598, "acc_stderr": 0.01669089616194438, "acc_norm": 0.4692737430167598, "acc_norm_stderr": 0.01669089616194438 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.6666666666666666, "acc_stderr": 0.02699254433929724, "acc_norm": 0.6666666666666666, "acc_norm_stderr": 0.02699254433929724 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.6784565916398714, "acc_stderr": 0.026527724079528872, "acc_norm": 0.6784565916398714, "acc_norm_stderr": 0.026527724079528872 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.6882716049382716, "acc_stderr": 0.02577311116963045, "acc_norm": 0.6882716049382716, "acc_norm_stderr": 0.02577311116963045 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.44680851063829785, "acc_stderr": 0.029658235097666904, "acc_norm": 0.44680851063829785, 
"acc_norm_stderr": 0.029658235097666904 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.47392438070404175, "acc_stderr": 0.012752858346533133, "acc_norm": 0.47392438070404175, "acc_norm_stderr": 0.012752858346533133 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.6397058823529411, "acc_stderr": 0.029163128570670733, "acc_norm": 0.6397058823529411, "acc_norm_stderr": 0.029163128570670733 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.6323529411764706, "acc_stderr": 0.019506291693954854, "acc_norm": 0.6323529411764706, "acc_norm_stderr": 0.019506291693954854 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.7, "acc_stderr": 0.04389311454644287, "acc_norm": 0.7, "acc_norm_stderr": 0.04389311454644287 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.636734693877551, "acc_stderr": 0.03078905113903081, "acc_norm": 0.636734693877551, "acc_norm_stderr": 0.03078905113903081 }, "harness|hendrycksTest-sociology|5": { "acc": 0.8308457711442786, "acc_stderr": 0.026508590656233264, "acc_norm": 0.8308457711442786, "acc_norm_stderr": 0.026508590656233264 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.81, "acc_stderr": 0.03942772444036623, "acc_norm": 0.81, "acc_norm_stderr": 0.03942772444036623 }, "harness|hendrycksTest-virology|5": { "acc": 0.5120481927710844, "acc_stderr": 0.03891364495835817, "acc_norm": 0.5120481927710844, "acc_norm_stderr": 0.03891364495835817 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.783625730994152, "acc_stderr": 0.031581495393387324, "acc_norm": 0.783625730994152, "acc_norm_stderr": 0.031581495393387324 }, "harness|truthfulqa:mc|0": { "mc1": 0.5165238678090576, "mc1_stderr": 0.017493940190057723, "mc2": 0.6740086972319943, "mc2_stderr": 0.015471222805293889 }, "harness|winogrande|5": { "acc": 0.7868981846882399, "acc_stderr": 0.011508957690722743 }, "harness|gsm8k|5": { "acc": 0.47687642153146326, "acc_stderr": 0.013757748544245331 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). 
--> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
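In addition to the per-task configs, the card above says the "results" configuration stores the aggregated metrics of the run. A minimal sketch of reading those aggregates back is shown below; the "results" config name and the "latest" split mirror the config layout used by the other evaluation datasets in this dump, and the exact column schema of the loaded row is not spelled out in the card, so it is inspected rather than assumed:

```python
from datasets import load_dataset

# "results" stores one row of aggregated metrics per evaluation run;
# the "latest" split always points at the most recent run.
results = load_dataset(
    "open-llm-leaderboard/details_mncai__agiin-13.6B-v0.0",
    "results",
    split="latest",
)

# The card does not document the exact schema of this row, so inspect it
# before relying on any particular field.
print(results.column_names)
print(results[0])
```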
open-llm-leaderboard/details_mncai__agiin-13.6B-v0.0
[ "region:us" ]
2023-12-16T15:58:17+00:00
{"pretty_name": "Evaluation run of mncai/agiin-13.6B-v0.0", "dataset_summary": "Dataset automatically created during the evaluation run of model [mncai/agiin-13.6B-v0.0](https://huggingface.co/mncai/agiin-13.6B-v0.0) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_mncai__agiin-13.6B-v0.0\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-12-16T15:55:21.950393](https://huggingface.co/datasets/open-llm-leaderboard/details_mncai__agiin-13.6B-v0.0/blob/main/results_2023-12-16T15-55-21.950393.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.621527215806331,\n \"acc_stderr\": 0.03309044810009566,\n \"acc_norm\": 0.6248205476117454,\n \"acc_norm_stderr\": 0.03375647243509085,\n \"mc1\": 0.5165238678090576,\n \"mc1_stderr\": 0.017493940190057723,\n \"mc2\": 0.6740086972319943,\n \"mc2_stderr\": 0.015471222805293889\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.659556313993174,\n \"acc_stderr\": 0.013847460518892973,\n \"acc_norm\": 0.6945392491467577,\n \"acc_norm_stderr\": 0.013460080478002508\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6858195578570006,\n \"acc_stderr\": 0.0046323996774908106,\n \"acc_norm\": 0.8658633738299144,\n \"acc_norm_stderr\": 0.0034010255178737237\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.35,\n \"acc_stderr\": 0.047937248544110196,\n \"acc_norm\": 0.35,\n \"acc_norm_stderr\": 0.047937248544110196\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.562962962962963,\n \"acc_stderr\": 0.042849586397534015,\n \"acc_norm\": 0.562962962962963,\n \"acc_norm_stderr\": 0.042849586397534015\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.625,\n \"acc_stderr\": 0.039397364351956274,\n \"acc_norm\": 0.625,\n \"acc_norm_stderr\": 0.039397364351956274\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.57,\n \"acc_stderr\": 0.049756985195624284,\n \"acc_norm\": 0.57,\n \"acc_norm_stderr\": 0.049756985195624284\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.6339622641509434,\n \"acc_stderr\": 0.029647813539365245,\n \"acc_norm\": 0.6339622641509434,\n \"acc_norm_stderr\": 0.029647813539365245\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.6944444444444444,\n \"acc_stderr\": 0.03852084696008534,\n \"acc_norm\": 0.6944444444444444,\n \"acc_norm_stderr\": 0.03852084696008534\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.44,\n \"acc_stderr\": 0.04988876515698589,\n \"acc_norm\": 0.44,\n \"acc_norm_stderr\": 0.04988876515698589\n 
},\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.51,\n \"acc_stderr\": 0.05024183937956911,\n \"acc_norm\": 0.51,\n \"acc_norm_stderr\": 0.05024183937956911\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.32,\n \"acc_stderr\": 0.04688261722621504,\n \"acc_norm\": 0.32,\n \"acc_norm_stderr\": 0.04688261722621504\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6184971098265896,\n \"acc_stderr\": 0.037038511930995215,\n \"acc_norm\": 0.6184971098265896,\n \"acc_norm_stderr\": 0.037038511930995215\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.4117647058823529,\n \"acc_stderr\": 0.048971049527263666,\n \"acc_norm\": 0.4117647058823529,\n \"acc_norm_stderr\": 0.048971049527263666\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.68,\n \"acc_stderr\": 0.04688261722621505,\n \"acc_norm\": 0.68,\n \"acc_norm_stderr\": 0.04688261722621505\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.5531914893617021,\n \"acc_stderr\": 0.032500536843658404,\n \"acc_norm\": 0.5531914893617021,\n \"acc_norm_stderr\": 0.032500536843658404\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.4473684210526316,\n \"acc_stderr\": 0.04677473004491199,\n \"acc_norm\": 0.4473684210526316,\n \"acc_norm_stderr\": 0.04677473004491199\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.6068965517241379,\n \"acc_stderr\": 0.040703290137070705,\n \"acc_norm\": 0.6068965517241379,\n \"acc_norm_stderr\": 0.040703290137070705\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.41798941798941797,\n \"acc_stderr\": 0.025402555503260912,\n \"acc_norm\": 0.41798941798941797,\n \"acc_norm_stderr\": 0.025402555503260912\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.42063492063492064,\n \"acc_stderr\": 0.04415438226743744,\n \"acc_norm\": 0.42063492063492064,\n \"acc_norm_stderr\": 0.04415438226743744\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.37,\n \"acc_stderr\": 0.048523658709391,\n \"acc_norm\": 0.37,\n \"acc_norm_stderr\": 0.048523658709391\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7354838709677419,\n \"acc_stderr\": 0.02509189237885928,\n \"acc_norm\": 0.7354838709677419,\n \"acc_norm_stderr\": 0.02509189237885928\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.4876847290640394,\n \"acc_stderr\": 0.035169204442208966,\n \"acc_norm\": 0.4876847290640394,\n \"acc_norm_stderr\": 0.035169204442208966\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.64,\n \"acc_stderr\": 0.04824181513244218,\n \"acc_norm\": 0.64,\n \"acc_norm_stderr\": 0.04824181513244218\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7878787878787878,\n \"acc_stderr\": 0.03192271569548301,\n \"acc_norm\": 0.7878787878787878,\n \"acc_norm_stderr\": 0.03192271569548301\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.7676767676767676,\n \"acc_stderr\": 0.030088629490217487,\n \"acc_norm\": 0.7676767676767676,\n \"acc_norm_stderr\": 0.030088629490217487\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.8393782383419689,\n \"acc_stderr\": 0.026499057701397443,\n \"acc_norm\": 0.8393782383419689,\n \"acc_norm_stderr\": 0.026499057701397443\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.6512820512820513,\n \"acc_stderr\": 0.02416278028401772,\n 
\"acc_norm\": 0.6512820512820513,\n \"acc_norm_stderr\": 0.02416278028401772\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.35185185185185186,\n \"acc_stderr\": 0.02911661760608301,\n \"acc_norm\": 0.35185185185185186,\n \"acc_norm_stderr\": 0.02911661760608301\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.6386554621848739,\n \"acc_stderr\": 0.03120469122515001,\n \"acc_norm\": 0.6386554621848739,\n \"acc_norm_stderr\": 0.03120469122515001\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.3443708609271523,\n \"acc_stderr\": 0.03879687024073327,\n \"acc_norm\": 0.3443708609271523,\n \"acc_norm_stderr\": 0.03879687024073327\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8201834862385321,\n \"acc_stderr\": 0.01646534546739152,\n \"acc_norm\": 0.8201834862385321,\n \"acc_norm_stderr\": 0.01646534546739152\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.5787037037037037,\n \"acc_stderr\": 0.033674621388960775,\n \"acc_norm\": 0.5787037037037037,\n \"acc_norm_stderr\": 0.033674621388960775\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.8088235294117647,\n \"acc_stderr\": 0.027599174300640766,\n \"acc_norm\": 0.8088235294117647,\n \"acc_norm_stderr\": 0.027599174300640766\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.7890295358649789,\n \"acc_stderr\": 0.02655837250266192,\n \"acc_norm\": 0.7890295358649789,\n \"acc_norm_stderr\": 0.02655837250266192\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6816143497757847,\n \"acc_stderr\": 0.03126580522513713,\n \"acc_norm\": 0.6816143497757847,\n \"acc_norm_stderr\": 0.03126580522513713\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.7022900763358778,\n \"acc_stderr\": 0.040103589424622034,\n \"acc_norm\": 0.7022900763358778,\n \"acc_norm_stderr\": 0.040103589424622034\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.8099173553719008,\n \"acc_stderr\": 0.03581796951709282,\n \"acc_norm\": 0.8099173553719008,\n \"acc_norm_stderr\": 0.03581796951709282\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.75,\n \"acc_stderr\": 0.04186091791394607,\n \"acc_norm\": 0.75,\n \"acc_norm_stderr\": 0.04186091791394607\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7423312883435583,\n \"acc_stderr\": 0.03436150827846917,\n \"acc_norm\": 0.7423312883435583,\n \"acc_norm_stderr\": 0.03436150827846917\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.5,\n \"acc_stderr\": 0.04745789978762494,\n \"acc_norm\": 0.5,\n \"acc_norm_stderr\": 0.04745789978762494\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7572815533980582,\n \"acc_stderr\": 0.04245022486384495,\n \"acc_norm\": 0.7572815533980582,\n \"acc_norm_stderr\": 0.04245022486384495\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8418803418803419,\n \"acc_stderr\": 0.023902325549560417,\n \"acc_norm\": 0.8418803418803419,\n \"acc_norm_stderr\": 0.023902325549560417\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.64,\n \"acc_stderr\": 0.04824181513244218,\n \"acc_norm\": 0.64,\n \"acc_norm_stderr\": 0.04824181513244218\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.7484035759897829,\n \"acc_stderr\": 0.015517322365529633,\n \"acc_norm\": 0.7484035759897829,\n \"acc_norm_stderr\": 0.015517322365529633\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n 
\"acc\": 0.6878612716763006,\n \"acc_stderr\": 0.024946792225272314,\n \"acc_norm\": 0.6878612716763006,\n \"acc_norm_stderr\": 0.024946792225272314\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.4692737430167598,\n \"acc_stderr\": 0.01669089616194438,\n \"acc_norm\": 0.4692737430167598,\n \"acc_norm_stderr\": 0.01669089616194438\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.6666666666666666,\n \"acc_stderr\": 0.02699254433929724,\n \"acc_norm\": 0.6666666666666666,\n \"acc_norm_stderr\": 0.02699254433929724\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.6784565916398714,\n \"acc_stderr\": 0.026527724079528872,\n \"acc_norm\": 0.6784565916398714,\n \"acc_norm_stderr\": 0.026527724079528872\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.6882716049382716,\n \"acc_stderr\": 0.02577311116963045,\n \"acc_norm\": 0.6882716049382716,\n \"acc_norm_stderr\": 0.02577311116963045\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.44680851063829785,\n \"acc_stderr\": 0.029658235097666904,\n \"acc_norm\": 0.44680851063829785,\n \"acc_norm_stderr\": 0.029658235097666904\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.47392438070404175,\n \"acc_stderr\": 0.012752858346533133,\n \"acc_norm\": 0.47392438070404175,\n \"acc_norm_stderr\": 0.012752858346533133\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.6397058823529411,\n \"acc_stderr\": 0.029163128570670733,\n \"acc_norm\": 0.6397058823529411,\n \"acc_norm_stderr\": 0.029163128570670733\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6323529411764706,\n \"acc_stderr\": 0.019506291693954854,\n \"acc_norm\": 0.6323529411764706,\n \"acc_norm_stderr\": 0.019506291693954854\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.7,\n \"acc_stderr\": 0.04389311454644287,\n \"acc_norm\": 0.7,\n \"acc_norm_stderr\": 0.04389311454644287\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.636734693877551,\n \"acc_stderr\": 0.03078905113903081,\n \"acc_norm\": 0.636734693877551,\n \"acc_norm_stderr\": 0.03078905113903081\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8308457711442786,\n \"acc_stderr\": 0.026508590656233264,\n \"acc_norm\": 0.8308457711442786,\n \"acc_norm_stderr\": 0.026508590656233264\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.81,\n \"acc_stderr\": 0.03942772444036623,\n \"acc_norm\": 0.81,\n \"acc_norm_stderr\": 0.03942772444036623\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5120481927710844,\n \"acc_stderr\": 0.03891364495835817,\n \"acc_norm\": 0.5120481927710844,\n \"acc_norm_stderr\": 0.03891364495835817\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.783625730994152,\n \"acc_stderr\": 0.031581495393387324,\n \"acc_norm\": 0.783625730994152,\n \"acc_norm_stderr\": 0.031581495393387324\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.5165238678090576,\n \"mc1_stderr\": 0.017493940190057723,\n \"mc2\": 0.6740086972319943,\n \"mc2_stderr\": 0.015471222805293889\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7868981846882399,\n \"acc_stderr\": 0.011508957690722743\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.47687642153146326,\n \"acc_stderr\": 0.013757748544245331\n }\n}\n```", "repo_url": "https://huggingface.co/mncai/agiin-13.6B-v0.0", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": 
[{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_12_16T15_55_21.950393", "path": ["**/details_harness|arc:challenge|25_2023-12-16T15-55-21.950393.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-12-16T15-55-21.950393.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_12_16T15_55_21.950393", "path": ["**/details_harness|gsm8k|5_2023-12-16T15-55-21.950393.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-12-16T15-55-21.950393.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_12_16T15_55_21.950393", "path": ["**/details_harness|hellaswag|10_2023-12-16T15-55-21.950393.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-12-16T15-55-21.950393.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_12_16T15_55_21.950393", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T15-55-21.950393.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-12-16T15-55-21.950393.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-12-16T15-55-21.950393.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T15-55-21.950393.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T15-55-21.950393.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-12-16T15-55-21.950393.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T15-55-21.950393.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T15-55-21.950393.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T15-55-21.950393.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T15-55-21.950393.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-12-16T15-55-21.950393.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-12-16T15-55-21.950393.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T15-55-21.950393.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-12-16T15-55-21.950393.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T15-55-21.950393.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T15-55-21.950393.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T15-55-21.950393.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-12-16T15-55-21.950393.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T15-55-21.950393.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T15-55-21.950393.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T15-55-21.950393.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T15-55-21.950393.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T15-55-21.950393.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T15-55-21.950393.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T15-55-21.950393.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T15-55-21.950393.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T15-55-21.950393.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T15-55-21.950393.parquet", 
"**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T15-55-21.950393.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T15-55-21.950393.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T15-55-21.950393.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T15-55-21.950393.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-12-16T15-55-21.950393.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T15-55-21.950393.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-12-16T15-55-21.950393.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T15-55-21.950393.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T15-55-21.950393.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T15-55-21.950393.parquet", "**/details_harness|hendrycksTest-management|5_2023-12-16T15-55-21.950393.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-12-16T15-55-21.950393.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T15-55-21.950393.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T15-55-21.950393.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T15-55-21.950393.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T15-55-21.950393.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-12-16T15-55-21.950393.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-12-16T15-55-21.950393.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-12-16T15-55-21.950393.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T15-55-21.950393.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-12-16T15-55-21.950393.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T15-55-21.950393.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T15-55-21.950393.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-12-16T15-55-21.950393.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-12-16T15-55-21.950393.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-12-16T15-55-21.950393.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T15-55-21.950393.parquet", "**/details_harness|hendrycksTest-virology|5_2023-12-16T15-55-21.950393.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-12-16T15-55-21.950393.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T15-55-21.950393.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-12-16T15-55-21.950393.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-12-16T15-55-21.950393.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T15-55-21.950393.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T15-55-21.950393.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-12-16T15-55-21.950393.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T15-55-21.950393.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T15-55-21.950393.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T15-55-21.950393.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T15-55-21.950393.parquet", 
"**/details_harness|hendrycksTest-college_physics|5_2023-12-16T15-55-21.950393.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-12-16T15-55-21.950393.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T15-55-21.950393.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-12-16T15-55-21.950393.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T15-55-21.950393.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T15-55-21.950393.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T15-55-21.950393.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-12-16T15-55-21.950393.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T15-55-21.950393.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T15-55-21.950393.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T15-55-21.950393.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T15-55-21.950393.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T15-55-21.950393.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T15-55-21.950393.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T15-55-21.950393.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T15-55-21.950393.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T15-55-21.950393.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T15-55-21.950393.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T15-55-21.950393.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T15-55-21.950393.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T15-55-21.950393.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T15-55-21.950393.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-12-16T15-55-21.950393.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T15-55-21.950393.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-12-16T15-55-21.950393.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T15-55-21.950393.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T15-55-21.950393.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T15-55-21.950393.parquet", "**/details_harness|hendrycksTest-management|5_2023-12-16T15-55-21.950393.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-12-16T15-55-21.950393.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T15-55-21.950393.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T15-55-21.950393.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T15-55-21.950393.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T15-55-21.950393.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-12-16T15-55-21.950393.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-12-16T15-55-21.950393.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-12-16T15-55-21.950393.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T15-55-21.950393.parquet", 
"**/details_harness|hendrycksTest-professional_law|5_2023-12-16T15-55-21.950393.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T15-55-21.950393.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T15-55-21.950393.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-12-16T15-55-21.950393.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-12-16T15-55-21.950393.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-12-16T15-55-21.950393.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T15-55-21.950393.parquet", "**/details_harness|hendrycksTest-virology|5_2023-12-16T15-55-21.950393.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-12-16T15-55-21.950393.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_12_16T15_55_21.950393", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T15-55-21.950393.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T15-55-21.950393.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_12_16T15_55_21.950393", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-12-16T15-55-21.950393.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-12-16T15-55-21.950393.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_12_16T15_55_21.950393", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-12-16T15-55-21.950393.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-12-16T15-55-21.950393.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_12_16T15_55_21.950393", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T15-55-21.950393.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T15-55-21.950393.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_12_16T15_55_21.950393", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T15-55-21.950393.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T15-55-21.950393.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_12_16T15_55_21.950393", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-12-16T15-55-21.950393.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-12-16T15-55-21.950393.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_12_16T15_55_21.950393", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T15-55-21.950393.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T15-55-21.950393.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_12_16T15_55_21.950393", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T15-55-21.950393.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T15-55-21.950393.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_12_16T15_55_21.950393", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T15-55-21.950393.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T15-55-21.950393.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_12_16T15_55_21.950393", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T15-55-21.950393.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T15-55-21.950393.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_12_16T15_55_21.950393", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-12-16T15-55-21.950393.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-12-16T15-55-21.950393.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_12_16T15_55_21.950393", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-12-16T15-55-21.950393.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-12-16T15-55-21.950393.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_12_16T15_55_21.950393", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T15-55-21.950393.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T15-55-21.950393.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_12_16T15_55_21.950393", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-12-16T15-55-21.950393.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-12-16T15-55-21.950393.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_12_16T15_55_21.950393", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T15-55-21.950393.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T15-55-21.950393.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_12_16T15_55_21.950393", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T15-55-21.950393.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T15-55-21.950393.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_12_16T15_55_21.950393", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T15-55-21.950393.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T15-55-21.950393.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_12_16T15_55_21.950393", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-12-16T15-55-21.950393.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-12-16T15-55-21.950393.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_12_16T15_55_21.950393", "path": 
["**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T15-55-21.950393.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T15-55-21.950393.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_12_16T15_55_21.950393", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T15-55-21.950393.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T15-55-21.950393.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_12_16T15_55_21.950393", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T15-55-21.950393.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T15-55-21.950393.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_12_16T15_55_21.950393", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T15-55-21.950393.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T15-55-21.950393.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_12_16T15_55_21.950393", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T15-55-21.950393.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T15-55-21.950393.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_12_16T15_55_21.950393", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T15-55-21.950393.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T15-55-21.950393.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_12_16T15_55_21.950393", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T15-55-21.950393.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T15-55-21.950393.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_12_16T15_55_21.950393", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T15-55-21.950393.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T15-55-21.950393.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_12_16T15_55_21.950393", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T15-55-21.950393.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T15-55-21.950393.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_12_16T15_55_21.950393", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T15-55-21.950393.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T15-55-21.950393.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_12_16T15_55_21.950393", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T15-55-21.950393.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T15-55-21.950393.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_12_16T15_55_21.950393", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T15-55-21.950393.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T15-55-21.950393.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_12_16T15_55_21.950393", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T15-55-21.950393.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T15-55-21.950393.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_12_16T15_55_21.950393", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T15-55-21.950393.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T15-55-21.950393.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_12_16T15_55_21.950393", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-12-16T15-55-21.950393.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-12-16T15-55-21.950393.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_12_16T15_55_21.950393", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T15-55-21.950393.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T15-55-21.950393.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_12_16T15_55_21.950393", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-12-16T15-55-21.950393.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-12-16T15-55-21.950393.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_12_16T15_55_21.950393", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T15-55-21.950393.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T15-55-21.950393.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_12_16T15_55_21.950393", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T15-55-21.950393.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T15-55-21.950393.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_12_16T15_55_21.950393", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T15-55-21.950393.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T15-55-21.950393.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_12_16T15_55_21.950393", "path": 
["**/details_harness|hendrycksTest-management|5_2023-12-16T15-55-21.950393.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-12-16T15-55-21.950393.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_12_16T15_55_21.950393", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-12-16T15-55-21.950393.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-12-16T15-55-21.950393.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_12_16T15_55_21.950393", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T15-55-21.950393.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T15-55-21.950393.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_12_16T15_55_21.950393", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T15-55-21.950393.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T15-55-21.950393.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_12_16T15_55_21.950393", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T15-55-21.950393.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T15-55-21.950393.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_12_16T15_55_21.950393", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T15-55-21.950393.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T15-55-21.950393.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_12_16T15_55_21.950393", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-12-16T15-55-21.950393.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-12-16T15-55-21.950393.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_12_16T15_55_21.950393", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-12-16T15-55-21.950393.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-12-16T15-55-21.950393.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_12_16T15_55_21.950393", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-12-16T15-55-21.950393.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-12-16T15-55-21.950393.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_12_16T15_55_21.950393", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T15-55-21.950393.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T15-55-21.950393.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_12_16T15_55_21.950393", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-12-16T15-55-21.950393.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-12-16T15-55-21.950393.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_12_16T15_55_21.950393", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T15-55-21.950393.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T15-55-21.950393.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_12_16T15_55_21.950393", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T15-55-21.950393.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T15-55-21.950393.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_12_16T15_55_21.950393", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-12-16T15-55-21.950393.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-12-16T15-55-21.950393.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_12_16T15_55_21.950393", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-12-16T15-55-21.950393.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-12-16T15-55-21.950393.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_12_16T15_55_21.950393", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-12-16T15-55-21.950393.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-12-16T15-55-21.950393.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_12_16T15_55_21.950393", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T15-55-21.950393.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T15-55-21.950393.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_12_16T15_55_21.950393", "path": ["**/details_harness|hendrycksTest-virology|5_2023-12-16T15-55-21.950393.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-12-16T15-55-21.950393.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_12_16T15_55_21.950393", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-12-16T15-55-21.950393.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-12-16T15-55-21.950393.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_12_16T15_55_21.950393", "path": ["**/details_harness|truthfulqa:mc|0_2023-12-16T15-55-21.950393.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-12-16T15-55-21.950393.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_12_16T15_55_21.950393", "path": ["**/details_harness|winogrande|5_2023-12-16T15-55-21.950393.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-12-16T15-55-21.950393.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_12_16T15_55_21.950393", "path": ["results_2023-12-16T15-55-21.950393.parquet"]}, {"split": "latest", "path": ["results_2023-12-16T15-55-21.950393.parquet"]}]}]}
2023-12-16T15:59:01+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of mncai/agiin-13.6B-v0.0 Dataset automatically created during the evaluation run of model mncai/agiin-13.6B-v0.0 on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-12-16T15:55:21.950393 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
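For instance, a minimal loading sketch with the `datasets` library (the repository id `open-llm-leaderboard/details_mncai__agiin-13.6B-v0.0` and the config name `harness_winogrande_5` are assumptions here, following the naming pattern used by the other evaluation-run datasets in this dump):

```python
from datasets import load_dataset

# Repository id and config name are assumed from the naming pattern of the other
# Open LLM Leaderboard detail datasets; adjust them to the actual run being inspected.
data = load_dataset(
    "open-llm-leaderboard/details_mncai__agiin-13.6B-v0.0",
    "harness_winogrande_5",
    split="train",  # the "train" split always points to the latest results
)
print(data)
```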
[ "# Dataset Card for Evaluation run of mncai/agiin-13.6B-v0.0\n\n\n\nDataset automatically created during the evaluation run of model mncai/agiin-13.6B-v0.0 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-12-16T15:55:21.950393(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of mncai/agiin-13.6B-v0.0\n\n\n\nDataset automatically created during the evaluation run of model mncai/agiin-13.6B-v0.0 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-12-16T15:55:21.950393(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 185, 66, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of mncai/agiin-13.6B-v0.0\n\n\n\nDataset automatically created during the evaluation run of model mncai/agiin-13.6B-v0.0 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-12-16T15:55:21.950393(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]## Dataset Card Authors [optional]## Dataset Card Contact" ]
6fcfde04045b7fb8ef7178ee7306eb3c820a4bee
# Dataset Card for Evaluation run of ignos/LeoScorpius-GreenNode-Platypus-7B-v1 <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [ignos/LeoScorpius-GreenNode-Platypus-7B-v1](https://huggingface.co/ignos/LeoScorpius-GreenNode-Platypus-7B-v1) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_ignos__LeoScorpius-GreenNode-Platypus-7B-v1", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-12-16T16:00:30.709211](https://huggingface.co/datasets/open-llm-leaderboard/details_ignos__LeoScorpius-GreenNode-Platypus-7B-v1/blob/main/results_2023-12-16T16-00-30.709211.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.6253622649843006, "acc_stderr": 0.03256427406790904, "acc_norm": 0.6254417624925451, "acc_norm_stderr": 0.033235993868121656, "mc1": 0.36107711138310894, "mc1_stderr": 0.016814312844836886, "mc2": 0.5277604131615456, "mc2_stderr": 0.01507398033093283 }, "harness|arc:challenge|25": { "acc": 0.6279863481228669, "acc_stderr": 0.014124597881844461, "acc_norm": 0.6604095563139932, "acc_norm_stderr": 0.013839039762820167 }, "harness|hellaswag|10": { "acc": 0.6799442342162916, "acc_stderr": 0.00465544276659947, "acc_norm": 0.8652658832901813, "acc_norm_stderr": 0.003407415513326042 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.25, "acc_stderr": 0.04351941398892446, "acc_norm": 0.25, "acc_norm_stderr": 0.04351941398892446 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.562962962962963, "acc_stderr": 0.042849586397534015, "acc_norm": 0.562962962962963, "acc_norm_stderr": 0.042849586397534015 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.6776315789473685, "acc_stderr": 0.03803510248351585, "acc_norm": 0.6776315789473685, "acc_norm_stderr": 0.03803510248351585 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.58, "acc_stderr": 0.049604496374885836, "acc_norm": 0.58, "acc_norm_stderr": 0.049604496374885836 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.6792452830188679, "acc_stderr": 0.028727502957880267, "acc_norm": 0.6792452830188679, "acc_norm_stderr": 0.028727502957880267 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.7361111111111112, "acc_stderr": 0.03685651095897532, "acc_norm": 0.7361111111111112, "acc_norm_stderr": 0.03685651095897532 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.49, "acc_stderr": 0.05024183937956912, "acc_norm": 0.49, "acc_norm_stderr": 0.05024183937956912 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.45, "acc_stderr": 0.04999999999999999, "acc_norm": 0.45, 
"acc_norm_stderr": 0.04999999999999999 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.34, "acc_stderr": 0.04760952285695236, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695236 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.6069364161849711, "acc_stderr": 0.03724249595817731, "acc_norm": 0.6069364161849711, "acc_norm_stderr": 0.03724249595817731 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.47058823529411764, "acc_stderr": 0.049665709039785295, "acc_norm": 0.47058823529411764, "acc_norm_stderr": 0.049665709039785295 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.72, "acc_stderr": 0.045126085985421276, "acc_norm": 0.72, "acc_norm_stderr": 0.045126085985421276 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.5659574468085107, "acc_stderr": 0.032400380867927465, "acc_norm": 0.5659574468085107, "acc_norm_stderr": 0.032400380867927465 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.4824561403508772, "acc_stderr": 0.04700708033551038, "acc_norm": 0.4824561403508772, "acc_norm_stderr": 0.04700708033551038 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5724137931034483, "acc_stderr": 0.04122737111370333, "acc_norm": 0.5724137931034483, "acc_norm_stderr": 0.04122737111370333 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.4126984126984127, "acc_stderr": 0.025355741263055266, "acc_norm": 0.4126984126984127, "acc_norm_stderr": 0.025355741263055266 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.42063492063492064, "acc_stderr": 0.04415438226743744, "acc_norm": 0.42063492063492064, "acc_norm_stderr": 0.04415438226743744 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.28, "acc_stderr": 0.04512608598542127, "acc_norm": 0.28, "acc_norm_stderr": 0.04512608598542127 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.7741935483870968, "acc_stderr": 0.023785577884181015, "acc_norm": 0.7741935483870968, "acc_norm_stderr": 0.023785577884181015 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.5024630541871922, "acc_stderr": 0.035179450386910616, "acc_norm": 0.5024630541871922, "acc_norm_stderr": 0.035179450386910616 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.63, "acc_stderr": 0.04852365870939099, "acc_norm": 0.63, "acc_norm_stderr": 0.04852365870939099 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.7333333333333333, "acc_stderr": 0.03453131801885417, "acc_norm": 0.7333333333333333, "acc_norm_stderr": 0.03453131801885417 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.8232323232323232, "acc_stderr": 0.027178752639044915, "acc_norm": 0.8232323232323232, "acc_norm_stderr": 0.027178752639044915 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.8808290155440415, "acc_stderr": 0.023381935348121462, "acc_norm": 0.8808290155440415, "acc_norm_stderr": 0.023381935348121462 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.6641025641025641, "acc_stderr": 0.023946724741563983, "acc_norm": 0.6641025641025641, "acc_norm_stderr": 0.023946724741563983 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.32592592592592595, "acc_stderr": 0.02857834836547308, "acc_norm": 0.32592592592592595, "acc_norm_stderr": 0.02857834836547308 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.7058823529411765, "acc_stderr": 0.029597329730978093, "acc_norm": 0.7058823529411765, "acc_norm_stderr": 0.029597329730978093 }, "harness|hendrycksTest-high_school_physics|5": { 
"acc": 0.32450331125827814, "acc_stderr": 0.03822746937658753, "acc_norm": 0.32450331125827814, "acc_norm_stderr": 0.03822746937658753 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8513761467889909, "acc_stderr": 0.015251253773660836, "acc_norm": 0.8513761467889909, "acc_norm_stderr": 0.015251253773660836 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.5740740740740741, "acc_stderr": 0.03372343271653063, "acc_norm": 0.5740740740740741, "acc_norm_stderr": 0.03372343271653063 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.7794117647058824, "acc_stderr": 0.02910225438967407, "acc_norm": 0.7794117647058824, "acc_norm_stderr": 0.02910225438967407 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.7805907172995781, "acc_stderr": 0.026939106581553945, "acc_norm": 0.7805907172995781, "acc_norm_stderr": 0.026939106581553945 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.672645739910314, "acc_stderr": 0.03149384670994131, "acc_norm": 0.672645739910314, "acc_norm_stderr": 0.03149384670994131 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.7480916030534351, "acc_stderr": 0.03807387116306085, "acc_norm": 0.7480916030534351, "acc_norm_stderr": 0.03807387116306085 }, "harness|hendrycksTest-international_law|5": { "acc": 0.7520661157024794, "acc_stderr": 0.03941897526516303, "acc_norm": 0.7520661157024794, "acc_norm_stderr": 0.03941897526516303 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.75, "acc_stderr": 0.04186091791394607, "acc_norm": 0.75, "acc_norm_stderr": 0.04186091791394607 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.7423312883435583, "acc_stderr": 0.03436150827846917, "acc_norm": 0.7423312883435583, "acc_norm_stderr": 0.03436150827846917 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.375, "acc_stderr": 0.04595091388086298, "acc_norm": 0.375, "acc_norm_stderr": 0.04595091388086298 }, "harness|hendrycksTest-management|5": { "acc": 0.8155339805825242, "acc_stderr": 0.03840423627288276, "acc_norm": 0.8155339805825242, "acc_norm_stderr": 0.03840423627288276 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8376068376068376, "acc_stderr": 0.024161618127987745, "acc_norm": 0.8376068376068376, "acc_norm_stderr": 0.024161618127987745 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.71, "acc_stderr": 0.045604802157206845, "acc_norm": 0.71, "acc_norm_stderr": 0.045604802157206845 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.7931034482758621, "acc_stderr": 0.014485656041669173, "acc_norm": 0.7931034482758621, "acc_norm_stderr": 0.014485656041669173 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.638728323699422, "acc_stderr": 0.025862201852277885, "acc_norm": 0.638728323699422, "acc_norm_stderr": 0.025862201852277885 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.3743016759776536, "acc_stderr": 0.01618544417945717, "acc_norm": 0.3743016759776536, "acc_norm_stderr": 0.01618544417945717 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.7254901960784313, "acc_stderr": 0.025553169991826524, "acc_norm": 0.7254901960784313, "acc_norm_stderr": 0.025553169991826524 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.6527331189710611, "acc_stderr": 0.027040745502307336, "acc_norm": 0.6527331189710611, "acc_norm_stderr": 0.027040745502307336 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.691358024691358, "acc_stderr": 0.025702640260603742, "acc_norm": 0.691358024691358, "acc_norm_stderr": 0.025702640260603742 }, 
"harness|hendrycksTest-professional_accounting|5": { "acc": 0.4432624113475177, "acc_stderr": 0.029634838473766002, "acc_norm": 0.4432624113475177, "acc_norm_stderr": 0.029634838473766002 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.3898305084745763, "acc_stderr": 0.012456386619082606, "acc_norm": 0.3898305084745763, "acc_norm_stderr": 0.012456386619082606 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.6544117647058824, "acc_stderr": 0.028888193103988633, "acc_norm": 0.6544117647058824, "acc_norm_stderr": 0.028888193103988633 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.6111111111111112, "acc_stderr": 0.019722058939618075, "acc_norm": 0.6111111111111112, "acc_norm_stderr": 0.019722058939618075 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6545454545454545, "acc_stderr": 0.04554619617541053, "acc_norm": 0.6545454545454545, "acc_norm_stderr": 0.04554619617541053 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.6653061224489796, "acc_stderr": 0.030209235226242307, "acc_norm": 0.6653061224489796, "acc_norm_stderr": 0.030209235226242307 }, "harness|hendrycksTest-sociology|5": { "acc": 0.8258706467661692, "acc_stderr": 0.026814951200421603, "acc_norm": 0.8258706467661692, "acc_norm_stderr": 0.026814951200421603 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.85, "acc_stderr": 0.03588702812826371, "acc_norm": 0.85, "acc_norm_stderr": 0.03588702812826371 }, "harness|hendrycksTest-virology|5": { "acc": 0.5240963855421686, "acc_stderr": 0.03887971849597264, "acc_norm": 0.5240963855421686, "acc_norm_stderr": 0.03887971849597264 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8304093567251462, "acc_stderr": 0.02878210810540171, "acc_norm": 0.8304093567251462, "acc_norm_stderr": 0.02878210810540171 }, "harness|truthfulqa:mc|0": { "mc1": 0.36107711138310894, "mc1_stderr": 0.016814312844836886, "mc2": 0.5277604131615456, "mc2_stderr": 0.01507398033093283 }, "harness|winogrande|5": { "acc": 0.8216258879242304, "acc_stderr": 0.010759352014855932 }, "harness|gsm8k|5": { "acc": 0.6421531463229719, "acc_stderr": 0.013204142536119944 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. 
news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
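The aggregated metrics shown under "Latest results" above come from the "results" configuration. A minimal sketch of reading them, assuming this dataset exposes a "results" config with a "latest" split like the other evaluation-run datasets in this dump:

```python
from datasets import load_dataset

# The "results" config stores the aggregated metrics of the run; the "latest" split
# is assumed to point at the most recent results parquet, as for the per-task configs.
results = load_dataset(
    "open-llm-leaderboard/details_ignos__LeoScorpius-GreenNode-Platypus-7B-v1",
    "results",
    split="latest",
)
print(results[0])
```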
open-llm-leaderboard/details_ignos__LeoScorpius-GreenNode-Platypus-7B-v1
[ "region:us" ]
2023-12-16T16:03:21+00:00
{"pretty_name": "Evaluation run of ignos/LeoScorpius-GreenNode-Platypus-7B-v1", "dataset_summary": "Dataset automatically created during the evaluation run of model [ignos/LeoScorpius-GreenNode-Platypus-7B-v1](https://huggingface.co/ignos/LeoScorpius-GreenNode-Platypus-7B-v1) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_ignos__LeoScorpius-GreenNode-Platypus-7B-v1\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-12-16T16:00:30.709211](https://huggingface.co/datasets/open-llm-leaderboard/details_ignos__LeoScorpius-GreenNode-Platypus-7B-v1/blob/main/results_2023-12-16T16-00-30.709211.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6253622649843006,\n \"acc_stderr\": 0.03256427406790904,\n \"acc_norm\": 0.6254417624925451,\n \"acc_norm_stderr\": 0.033235993868121656,\n \"mc1\": 0.36107711138310894,\n \"mc1_stderr\": 0.016814312844836886,\n \"mc2\": 0.5277604131615456,\n \"mc2_stderr\": 0.01507398033093283\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.6279863481228669,\n \"acc_stderr\": 0.014124597881844461,\n \"acc_norm\": 0.6604095563139932,\n \"acc_norm_stderr\": 0.013839039762820167\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6799442342162916,\n \"acc_stderr\": 0.00465544276659947,\n \"acc_norm\": 0.8652658832901813,\n \"acc_norm_stderr\": 0.003407415513326042\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.25,\n \"acc_stderr\": 0.04351941398892446,\n \"acc_norm\": 0.25,\n \"acc_norm_stderr\": 0.04351941398892446\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.562962962962963,\n \"acc_stderr\": 0.042849586397534015,\n \"acc_norm\": 0.562962962962963,\n \"acc_norm_stderr\": 0.042849586397534015\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.6776315789473685,\n \"acc_stderr\": 0.03803510248351585,\n \"acc_norm\": 0.6776315789473685,\n \"acc_norm_stderr\": 0.03803510248351585\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.58,\n \"acc_stderr\": 0.049604496374885836,\n \"acc_norm\": 0.58,\n \"acc_norm_stderr\": 0.049604496374885836\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.6792452830188679,\n \"acc_stderr\": 0.028727502957880267,\n \"acc_norm\": 0.6792452830188679,\n \"acc_norm_stderr\": 0.028727502957880267\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7361111111111112,\n \"acc_stderr\": 0.03685651095897532,\n \"acc_norm\": 0.7361111111111112,\n \"acc_norm_stderr\": 0.03685651095897532\n },\n \"harness|hendrycksTest-college_chemistry|5\": 
{\n \"acc\": 0.49,\n \"acc_stderr\": 0.05024183937956912,\n \"acc_norm\": 0.49,\n \"acc_norm_stderr\": 0.05024183937956912\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.45,\n \"acc_stderr\": 0.04999999999999999,\n \"acc_norm\": 0.45,\n \"acc_norm_stderr\": 0.04999999999999999\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.34,\n \"acc_stderr\": 0.04760952285695236,\n \"acc_norm\": 0.34,\n \"acc_norm_stderr\": 0.04760952285695236\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6069364161849711,\n \"acc_stderr\": 0.03724249595817731,\n \"acc_norm\": 0.6069364161849711,\n \"acc_norm_stderr\": 0.03724249595817731\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.47058823529411764,\n \"acc_stderr\": 0.049665709039785295,\n \"acc_norm\": 0.47058823529411764,\n \"acc_norm_stderr\": 0.049665709039785295\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.72,\n \"acc_stderr\": 0.045126085985421276,\n \"acc_norm\": 0.72,\n \"acc_norm_stderr\": 0.045126085985421276\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.5659574468085107,\n \"acc_stderr\": 0.032400380867927465,\n \"acc_norm\": 0.5659574468085107,\n \"acc_norm_stderr\": 0.032400380867927465\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.4824561403508772,\n \"acc_stderr\": 0.04700708033551038,\n \"acc_norm\": 0.4824561403508772,\n \"acc_norm_stderr\": 0.04700708033551038\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5724137931034483,\n \"acc_stderr\": 0.04122737111370333,\n \"acc_norm\": 0.5724137931034483,\n \"acc_norm_stderr\": 0.04122737111370333\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.4126984126984127,\n \"acc_stderr\": 0.025355741263055266,\n \"acc_norm\": 0.4126984126984127,\n \"acc_norm_stderr\": 0.025355741263055266\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.42063492063492064,\n \"acc_stderr\": 0.04415438226743744,\n \"acc_norm\": 0.42063492063492064,\n \"acc_norm_stderr\": 0.04415438226743744\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.28,\n \"acc_stderr\": 0.04512608598542127,\n \"acc_norm\": 0.28,\n \"acc_norm_stderr\": 0.04512608598542127\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7741935483870968,\n \"acc_stderr\": 0.023785577884181015,\n \"acc_norm\": 0.7741935483870968,\n \"acc_norm_stderr\": 0.023785577884181015\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.5024630541871922,\n \"acc_stderr\": 0.035179450386910616,\n \"acc_norm\": 0.5024630541871922,\n \"acc_norm_stderr\": 0.035179450386910616\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.63,\n \"acc_stderr\": 0.04852365870939099,\n \"acc_norm\": 0.63,\n \"acc_norm_stderr\": 0.04852365870939099\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7333333333333333,\n \"acc_stderr\": 0.03453131801885417,\n \"acc_norm\": 0.7333333333333333,\n \"acc_norm_stderr\": 0.03453131801885417\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.8232323232323232,\n \"acc_stderr\": 0.027178752639044915,\n \"acc_norm\": 0.8232323232323232,\n \"acc_norm_stderr\": 0.027178752639044915\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.8808290155440415,\n \"acc_stderr\": 0.023381935348121462,\n \"acc_norm\": 0.8808290155440415,\n \"acc_norm_stderr\": 0.023381935348121462\n },\n 
\"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.6641025641025641,\n \"acc_stderr\": 0.023946724741563983,\n \"acc_norm\": 0.6641025641025641,\n \"acc_norm_stderr\": 0.023946724741563983\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.32592592592592595,\n \"acc_stderr\": 0.02857834836547308,\n \"acc_norm\": 0.32592592592592595,\n \"acc_norm_stderr\": 0.02857834836547308\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.7058823529411765,\n \"acc_stderr\": 0.029597329730978093,\n \"acc_norm\": 0.7058823529411765,\n \"acc_norm_stderr\": 0.029597329730978093\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.32450331125827814,\n \"acc_stderr\": 0.03822746937658753,\n \"acc_norm\": 0.32450331125827814,\n \"acc_norm_stderr\": 0.03822746937658753\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8513761467889909,\n \"acc_stderr\": 0.015251253773660836,\n \"acc_norm\": 0.8513761467889909,\n \"acc_norm_stderr\": 0.015251253773660836\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.5740740740740741,\n \"acc_stderr\": 0.03372343271653063,\n \"acc_norm\": 0.5740740740740741,\n \"acc_norm_stderr\": 0.03372343271653063\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.7794117647058824,\n \"acc_stderr\": 0.02910225438967407,\n \"acc_norm\": 0.7794117647058824,\n \"acc_norm_stderr\": 0.02910225438967407\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.7805907172995781,\n \"acc_stderr\": 0.026939106581553945,\n \"acc_norm\": 0.7805907172995781,\n \"acc_norm_stderr\": 0.026939106581553945\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.672645739910314,\n \"acc_stderr\": 0.03149384670994131,\n \"acc_norm\": 0.672645739910314,\n \"acc_norm_stderr\": 0.03149384670994131\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.7480916030534351,\n \"acc_stderr\": 0.03807387116306085,\n \"acc_norm\": 0.7480916030534351,\n \"acc_norm_stderr\": 0.03807387116306085\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.7520661157024794,\n \"acc_stderr\": 0.03941897526516303,\n \"acc_norm\": 0.7520661157024794,\n \"acc_norm_stderr\": 0.03941897526516303\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.75,\n \"acc_stderr\": 0.04186091791394607,\n \"acc_norm\": 0.75,\n \"acc_norm_stderr\": 0.04186091791394607\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7423312883435583,\n \"acc_stderr\": 0.03436150827846917,\n \"acc_norm\": 0.7423312883435583,\n \"acc_norm_stderr\": 0.03436150827846917\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.375,\n \"acc_stderr\": 0.04595091388086298,\n \"acc_norm\": 0.375,\n \"acc_norm_stderr\": 0.04595091388086298\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.8155339805825242,\n \"acc_stderr\": 0.03840423627288276,\n \"acc_norm\": 0.8155339805825242,\n \"acc_norm_stderr\": 0.03840423627288276\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8376068376068376,\n \"acc_stderr\": 0.024161618127987745,\n \"acc_norm\": 0.8376068376068376,\n \"acc_norm_stderr\": 0.024161618127987745\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.71,\n \"acc_stderr\": 0.045604802157206845,\n \"acc_norm\": 0.71,\n \"acc_norm_stderr\": 0.045604802157206845\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.7931034482758621,\n \"acc_stderr\": 0.014485656041669173,\n 
\"acc_norm\": 0.7931034482758621,\n \"acc_norm_stderr\": 0.014485656041669173\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.638728323699422,\n \"acc_stderr\": 0.025862201852277885,\n \"acc_norm\": 0.638728323699422,\n \"acc_norm_stderr\": 0.025862201852277885\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.3743016759776536,\n \"acc_stderr\": 0.01618544417945717,\n \"acc_norm\": 0.3743016759776536,\n \"acc_norm_stderr\": 0.01618544417945717\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.7254901960784313,\n \"acc_stderr\": 0.025553169991826524,\n \"acc_norm\": 0.7254901960784313,\n \"acc_norm_stderr\": 0.025553169991826524\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.6527331189710611,\n \"acc_stderr\": 0.027040745502307336,\n \"acc_norm\": 0.6527331189710611,\n \"acc_norm_stderr\": 0.027040745502307336\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.691358024691358,\n \"acc_stderr\": 0.025702640260603742,\n \"acc_norm\": 0.691358024691358,\n \"acc_norm_stderr\": 0.025702640260603742\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.4432624113475177,\n \"acc_stderr\": 0.029634838473766002,\n \"acc_norm\": 0.4432624113475177,\n \"acc_norm_stderr\": 0.029634838473766002\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.3898305084745763,\n \"acc_stderr\": 0.012456386619082606,\n \"acc_norm\": 0.3898305084745763,\n \"acc_norm_stderr\": 0.012456386619082606\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.6544117647058824,\n \"acc_stderr\": 0.028888193103988633,\n \"acc_norm\": 0.6544117647058824,\n \"acc_norm_stderr\": 0.028888193103988633\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6111111111111112,\n \"acc_stderr\": 0.019722058939618075,\n \"acc_norm\": 0.6111111111111112,\n \"acc_norm_stderr\": 0.019722058939618075\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6545454545454545,\n \"acc_stderr\": 0.04554619617541053,\n \"acc_norm\": 0.6545454545454545,\n \"acc_norm_stderr\": 0.04554619617541053\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.6653061224489796,\n \"acc_stderr\": 0.030209235226242307,\n \"acc_norm\": 0.6653061224489796,\n \"acc_norm_stderr\": 0.030209235226242307\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8258706467661692,\n \"acc_stderr\": 0.026814951200421603,\n \"acc_norm\": 0.8258706467661692,\n \"acc_norm_stderr\": 0.026814951200421603\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.85,\n \"acc_stderr\": 0.03588702812826371,\n \"acc_norm\": 0.85,\n \"acc_norm_stderr\": 0.03588702812826371\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5240963855421686,\n \"acc_stderr\": 0.03887971849597264,\n \"acc_norm\": 0.5240963855421686,\n \"acc_norm_stderr\": 0.03887971849597264\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8304093567251462,\n \"acc_stderr\": 0.02878210810540171,\n \"acc_norm\": 0.8304093567251462,\n \"acc_norm_stderr\": 0.02878210810540171\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.36107711138310894,\n \"mc1_stderr\": 0.016814312844836886,\n \"mc2\": 0.5277604131615456,\n \"mc2_stderr\": 0.01507398033093283\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.8216258879242304,\n \"acc_stderr\": 0.010759352014855932\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.6421531463229719,\n \"acc_stderr\": 0.013204142536119944\n }\n}\n```", "repo_url": 
"https://huggingface.co/ignos/LeoScorpius-GreenNode-Platypus-7B-v1", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_12_16T16_00_30.709211", "path": ["**/details_harness|arc:challenge|25_2023-12-16T16-00-30.709211.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-12-16T16-00-30.709211.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_12_16T16_00_30.709211", "path": ["**/details_harness|gsm8k|5_2023-12-16T16-00-30.709211.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-12-16T16-00-30.709211.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_12_16T16_00_30.709211", "path": ["**/details_harness|hellaswag|10_2023-12-16T16-00-30.709211.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-12-16T16-00-30.709211.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_12_16T16_00_30.709211", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T16-00-30.709211.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-12-16T16-00-30.709211.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-12-16T16-00-30.709211.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T16-00-30.709211.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T16-00-30.709211.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-12-16T16-00-30.709211.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T16-00-30.709211.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T16-00-30.709211.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T16-00-30.709211.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T16-00-30.709211.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-12-16T16-00-30.709211.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-12-16T16-00-30.709211.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T16-00-30.709211.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-12-16T16-00-30.709211.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T16-00-30.709211.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T16-00-30.709211.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T16-00-30.709211.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-12-16T16-00-30.709211.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T16-00-30.709211.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T16-00-30.709211.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T16-00-30.709211.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T16-00-30.709211.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T16-00-30.709211.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T16-00-30.709211.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T16-00-30.709211.parquet", 
"**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T16-00-30.709211.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T16-00-30.709211.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T16-00-30.709211.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T16-00-30.709211.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T16-00-30.709211.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T16-00-30.709211.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T16-00-30.709211.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-12-16T16-00-30.709211.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T16-00-30.709211.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-12-16T16-00-30.709211.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T16-00-30.709211.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T16-00-30.709211.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T16-00-30.709211.parquet", "**/details_harness|hendrycksTest-management|5_2023-12-16T16-00-30.709211.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-12-16T16-00-30.709211.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T16-00-30.709211.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T16-00-30.709211.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T16-00-30.709211.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T16-00-30.709211.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-12-16T16-00-30.709211.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-12-16T16-00-30.709211.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-12-16T16-00-30.709211.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T16-00-30.709211.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-12-16T16-00-30.709211.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T16-00-30.709211.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T16-00-30.709211.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-12-16T16-00-30.709211.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-12-16T16-00-30.709211.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-12-16T16-00-30.709211.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T16-00-30.709211.parquet", "**/details_harness|hendrycksTest-virology|5_2023-12-16T16-00-30.709211.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-12-16T16-00-30.709211.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T16-00-30.709211.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-12-16T16-00-30.709211.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-12-16T16-00-30.709211.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T16-00-30.709211.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T16-00-30.709211.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-12-16T16-00-30.709211.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T16-00-30.709211.parquet", 
"**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T16-00-30.709211.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T16-00-30.709211.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T16-00-30.709211.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-12-16T16-00-30.709211.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-12-16T16-00-30.709211.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T16-00-30.709211.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-12-16T16-00-30.709211.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T16-00-30.709211.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T16-00-30.709211.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T16-00-30.709211.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-12-16T16-00-30.709211.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T16-00-30.709211.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T16-00-30.709211.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T16-00-30.709211.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T16-00-30.709211.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T16-00-30.709211.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T16-00-30.709211.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T16-00-30.709211.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T16-00-30.709211.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T16-00-30.709211.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T16-00-30.709211.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T16-00-30.709211.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T16-00-30.709211.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T16-00-30.709211.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T16-00-30.709211.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-12-16T16-00-30.709211.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T16-00-30.709211.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-12-16T16-00-30.709211.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T16-00-30.709211.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T16-00-30.709211.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T16-00-30.709211.parquet", "**/details_harness|hendrycksTest-management|5_2023-12-16T16-00-30.709211.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-12-16T16-00-30.709211.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T16-00-30.709211.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T16-00-30.709211.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T16-00-30.709211.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T16-00-30.709211.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-12-16T16-00-30.709211.parquet", 
"**/details_harness|hendrycksTest-philosophy|5_2023-12-16T16-00-30.709211.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-12-16T16-00-30.709211.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T16-00-30.709211.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-12-16T16-00-30.709211.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T16-00-30.709211.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T16-00-30.709211.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-12-16T16-00-30.709211.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-12-16T16-00-30.709211.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-12-16T16-00-30.709211.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T16-00-30.709211.parquet", "**/details_harness|hendrycksTest-virology|5_2023-12-16T16-00-30.709211.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-12-16T16-00-30.709211.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_12_16T16_00_30.709211", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T16-00-30.709211.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T16-00-30.709211.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_12_16T16_00_30.709211", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-12-16T16-00-30.709211.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-12-16T16-00-30.709211.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_12_16T16_00_30.709211", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-12-16T16-00-30.709211.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-12-16T16-00-30.709211.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_12_16T16_00_30.709211", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T16-00-30.709211.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T16-00-30.709211.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_12_16T16_00_30.709211", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T16-00-30.709211.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T16-00-30.709211.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_12_16T16_00_30.709211", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-12-16T16-00-30.709211.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-12-16T16-00-30.709211.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_12_16T16_00_30.709211", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T16-00-30.709211.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T16-00-30.709211.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_12_16T16_00_30.709211", "path": 
["**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T16-00-30.709211.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T16-00-30.709211.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_12_16T16_00_30.709211", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T16-00-30.709211.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T16-00-30.709211.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_12_16T16_00_30.709211", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T16-00-30.709211.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T16-00-30.709211.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_12_16T16_00_30.709211", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-12-16T16-00-30.709211.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-12-16T16-00-30.709211.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_12_16T16_00_30.709211", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-12-16T16-00-30.709211.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-12-16T16-00-30.709211.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_12_16T16_00_30.709211", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T16-00-30.709211.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T16-00-30.709211.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_12_16T16_00_30.709211", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-12-16T16-00-30.709211.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-12-16T16-00-30.709211.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_12_16T16_00_30.709211", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T16-00-30.709211.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T16-00-30.709211.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_12_16T16_00_30.709211", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T16-00-30.709211.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T16-00-30.709211.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_12_16T16_00_30.709211", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T16-00-30.709211.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T16-00-30.709211.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_12_16T16_00_30.709211", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-12-16T16-00-30.709211.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-global_facts|5_2023-12-16T16-00-30.709211.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_12_16T16_00_30.709211", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T16-00-30.709211.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T16-00-30.709211.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_12_16T16_00_30.709211", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T16-00-30.709211.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T16-00-30.709211.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_12_16T16_00_30.709211", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T16-00-30.709211.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T16-00-30.709211.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_12_16T16_00_30.709211", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T16-00-30.709211.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T16-00-30.709211.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_12_16T16_00_30.709211", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T16-00-30.709211.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T16-00-30.709211.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_12_16T16_00_30.709211", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T16-00-30.709211.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T16-00-30.709211.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_12_16T16_00_30.709211", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T16-00-30.709211.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T16-00-30.709211.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_12_16T16_00_30.709211", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T16-00-30.709211.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T16-00-30.709211.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_12_16T16_00_30.709211", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T16-00-30.709211.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T16-00-30.709211.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_12_16T16_00_30.709211", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T16-00-30.709211.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T16-00-30.709211.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_12_16T16_00_30.709211", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T16-00-30.709211.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T16-00-30.709211.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_12_16T16_00_30.709211", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T16-00-30.709211.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T16-00-30.709211.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_12_16T16_00_30.709211", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T16-00-30.709211.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T16-00-30.709211.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_12_16T16_00_30.709211", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T16-00-30.709211.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T16-00-30.709211.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_12_16T16_00_30.709211", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-12-16T16-00-30.709211.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-12-16T16-00-30.709211.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_12_16T16_00_30.709211", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T16-00-30.709211.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T16-00-30.709211.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_12_16T16_00_30.709211", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-12-16T16-00-30.709211.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-12-16T16-00-30.709211.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_12_16T16_00_30.709211", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T16-00-30.709211.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T16-00-30.709211.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_12_16T16_00_30.709211", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T16-00-30.709211.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T16-00-30.709211.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_12_16T16_00_30.709211", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T16-00-30.709211.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T16-00-30.709211.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_12_16T16_00_30.709211", "path": ["**/details_harness|hendrycksTest-management|5_2023-12-16T16-00-30.709211.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-12-16T16-00-30.709211.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_12_16T16_00_30.709211", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-12-16T16-00-30.709211.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-12-16T16-00-30.709211.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_12_16T16_00_30.709211", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T16-00-30.709211.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T16-00-30.709211.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_12_16T16_00_30.709211", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T16-00-30.709211.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T16-00-30.709211.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_12_16T16_00_30.709211", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T16-00-30.709211.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T16-00-30.709211.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_12_16T16_00_30.709211", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T16-00-30.709211.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T16-00-30.709211.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_12_16T16_00_30.709211", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-12-16T16-00-30.709211.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-12-16T16-00-30.709211.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_12_16T16_00_30.709211", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-12-16T16-00-30.709211.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-12-16T16-00-30.709211.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_12_16T16_00_30.709211", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-12-16T16-00-30.709211.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-12-16T16-00-30.709211.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_12_16T16_00_30.709211", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T16-00-30.709211.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T16-00-30.709211.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_12_16T16_00_30.709211", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2023-12-16T16-00-30.709211.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-12-16T16-00-30.709211.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_12_16T16_00_30.709211", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T16-00-30.709211.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T16-00-30.709211.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_12_16T16_00_30.709211", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T16-00-30.709211.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T16-00-30.709211.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_12_16T16_00_30.709211", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-12-16T16-00-30.709211.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-12-16T16-00-30.709211.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_12_16T16_00_30.709211", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-12-16T16-00-30.709211.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-12-16T16-00-30.709211.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_12_16T16_00_30.709211", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-12-16T16-00-30.709211.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-12-16T16-00-30.709211.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_12_16T16_00_30.709211", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T16-00-30.709211.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T16-00-30.709211.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_12_16T16_00_30.709211", "path": ["**/details_harness|hendrycksTest-virology|5_2023-12-16T16-00-30.709211.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-12-16T16-00-30.709211.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_12_16T16_00_30.709211", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-12-16T16-00-30.709211.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-12-16T16-00-30.709211.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_12_16T16_00_30.709211", "path": ["**/details_harness|truthfulqa:mc|0_2023-12-16T16-00-30.709211.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-12-16T16-00-30.709211.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_12_16T16_00_30.709211", "path": ["**/details_harness|winogrande|5_2023-12-16T16-00-30.709211.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-12-16T16-00-30.709211.parquet"]}]}, {"config_name": "results", "data_files": [{"split": 
"2023_12_16T16_00_30.709211", "path": ["results_2023-12-16T16-00-30.709211.parquet"]}, {"split": "latest", "path": ["results_2023-12-16T16-00-30.709211.parquet"]}]}]}
2023-12-16T16:04:16+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of ignos/LeoScorpius-GreenNode-Platypus-7B-v1 Dataset automatically created during the evaluation run of model ignos/LeoScorpius-GreenNode-Platypus-7B-v1 on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-12-16T16:00:30.709211 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
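For reference, a minimal sketch of the load call referenced above ("To load the details from a run, you can for instance do the following:"), which was stripped from this flattened text. The repository id is an assumption inferred from the leaderboard's `details_<org>__<model>` naming convention rather than taken from this record, and `harness_winogrande_5` is one of the configurations listed in the metadata above:

```python
from datasets import load_dataset

# Assumed repo id, inferred from the leaderboard's naming convention for
# ignos/LeoScorpius-GreenNode-Platypus-7B-v1 (not verified against the Hub).
data = load_dataset(
    "open-llm-leaderboard/details_ignos__LeoScorpius-GreenNode-Platypus-7B-v1",
    "harness_winogrande_5",
    split="train",
)
```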
[ "# Dataset Card for Evaluation run of ignos/LeoScorpius-GreenNode-Platypus-7B-v1\n\n\n\nDataset automatically created during the evaluation run of model ignos/LeoScorpius-GreenNode-Platypus-7B-v1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-12-16T16:00:30.709211(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of ignos/LeoScorpius-GreenNode-Platypus-7B-v1\n\n\n\nDataset automatically created during the evaluation run of model ignos/LeoScorpius-GreenNode-Platypus-7B-v1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-12-16T16:00:30.709211(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 205, 68, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of ignos/LeoScorpius-GreenNode-Platypus-7B-v1\n\n\n\nDataset automatically created during the evaluation run of model ignos/LeoScorpius-GreenNode-Platypus-7B-v1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-12-16T16:00:30.709211(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]" ]
f3f3ccde3a84b2b8d4599d620066637e6d0155bc
# Dataset Card for Evaluation run of sarvamai/OpenHathi-7B-Hi-v0.1-Base <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [sarvamai/OpenHathi-7B-Hi-v0.1-Base](https://huggingface.co/sarvamai/OpenHathi-7B-Hi-v0.1-Base) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_sarvamai__OpenHathi-7B-Hi-v0.1-Base", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-12-16T16:03:14.382672](https://huggingface.co/datasets/open-llm-leaderboard/details_sarvamai__OpenHathi-7B-Hi-v0.1-Base/blob/main/results_2023-12-16T16-03-14.382672.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.4158144377668671, "acc_stderr": 0.03431583720305929, "acc_norm": 0.42077773987307815, "acc_norm_stderr": 0.03514420382029662, "mc1": 0.23623011015911874, "mc1_stderr": 0.014869755015871114, "mc2": 0.37462221164216, "mc2_stderr": 0.014208268852646139 }, "harness|arc:challenge|25": { "acc": 0.4539249146757679, "acc_stderr": 0.014549221105171872, "acc_norm": 0.4948805460750853, "acc_norm_stderr": 0.01461062489030916 }, "harness|hellaswag|10": { "acc": 0.5512846046604262, "acc_stderr": 0.00496346465774724, "acc_norm": 0.7433778131846246, "acc_norm_stderr": 0.004358764596401024 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.42962962962962964, "acc_stderr": 0.04276349494376599, "acc_norm": 0.42962962962962964, "acc_norm_stderr": 0.04276349494376599 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.40131578947368424, "acc_stderr": 0.039889037033362836, "acc_norm": 0.40131578947368424, "acc_norm_stderr": 0.039889037033362836 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.49, "acc_stderr": 0.05024183937956912, "acc_norm": 0.49, "acc_norm_stderr": 0.05024183937956912 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.38113207547169814, "acc_stderr": 0.02989060968628664, "acc_norm": 0.38113207547169814, "acc_norm_stderr": 0.02989060968628664 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.4097222222222222, "acc_stderr": 0.04112490974670787, "acc_norm": 0.4097222222222222, "acc_norm_stderr": 0.04112490974670787 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.33, "acc_stderr": 0.047258156262526045, "acc_norm": 0.33, "acc_norm_stderr": 0.047258156262526045 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.39, "acc_stderr": 0.04902071300001974, "acc_norm": 0.39, "acc_norm_stderr": 0.04902071300001974 }, 
"harness|hendrycksTest-college_mathematics|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.3179190751445087, "acc_stderr": 0.0355068398916558, "acc_norm": 0.3179190751445087, "acc_norm_stderr": 0.0355068398916558 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.24509803921568626, "acc_stderr": 0.042801058373643966, "acc_norm": 0.24509803921568626, "acc_norm_stderr": 0.042801058373643966 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.56, "acc_stderr": 0.04988876515698589, "acc_norm": 0.56, "acc_norm_stderr": 0.04988876515698589 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.34893617021276596, "acc_stderr": 0.031158522131357783, "acc_norm": 0.34893617021276596, "acc_norm_stderr": 0.031158522131357783 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.19298245614035087, "acc_stderr": 0.03712454853721368, "acc_norm": 0.19298245614035087, "acc_norm_stderr": 0.03712454853721368 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.4482758620689655, "acc_stderr": 0.04144311810878151, "acc_norm": 0.4482758620689655, "acc_norm_stderr": 0.04144311810878151 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.2671957671957672, "acc_stderr": 0.022789673145776564, "acc_norm": 0.2671957671957672, "acc_norm_stderr": 0.022789673145776564 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.3412698412698413, "acc_stderr": 0.04240799327574925, "acc_norm": 0.3412698412698413, "acc_norm_stderr": 0.04240799327574925 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.36, "acc_stderr": 0.04824181513244218, "acc_norm": 0.36, "acc_norm_stderr": 0.04824181513244218 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.44516129032258067, "acc_stderr": 0.028272410186214906, "acc_norm": 0.44516129032258067, "acc_norm_stderr": 0.028272410186214906 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.22660098522167488, "acc_stderr": 0.029454863835292965, "acc_norm": 0.22660098522167488, "acc_norm_stderr": 0.029454863835292965 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.44, "acc_stderr": 0.04988876515698589, "acc_norm": 0.44, "acc_norm_stderr": 0.04988876515698589 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.5636363636363636, "acc_stderr": 0.03872592983524754, "acc_norm": 0.5636363636363636, "acc_norm_stderr": 0.03872592983524754 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.46464646464646464, "acc_stderr": 0.035534363688280626, "acc_norm": 0.46464646464646464, "acc_norm_stderr": 0.035534363688280626 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.5751295336787565, "acc_stderr": 0.03567471335212539, "acc_norm": 0.5751295336787565, "acc_norm_stderr": 0.03567471335212539 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.37948717948717947, "acc_stderr": 0.024603626924097417, "acc_norm": 0.37948717948717947, "acc_norm_stderr": 0.024603626924097417 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.27037037037037037, "acc_stderr": 0.027080372815145665, "acc_norm": 0.27037037037037037, "acc_norm_stderr": 0.027080372815145665 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.35714285714285715, "acc_stderr": 0.031124619309328177, "acc_norm": 0.35714285714285715, "acc_norm_stderr": 0.031124619309328177 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.2847682119205298, 
"acc_stderr": 0.03684881521389023, "acc_norm": 0.2847682119205298, "acc_norm_stderr": 0.03684881521389023 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.45871559633027525, "acc_stderr": 0.0213641225338817, "acc_norm": 0.45871559633027525, "acc_norm_stderr": 0.0213641225338817 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.2962962962962963, "acc_stderr": 0.03114144782353603, "acc_norm": 0.2962962962962963, "acc_norm_stderr": 0.03114144782353603 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.5441176470588235, "acc_stderr": 0.03495624522015476, "acc_norm": 0.5441176470588235, "acc_norm_stderr": 0.03495624522015476 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.5569620253164557, "acc_stderr": 0.032335327775334835, "acc_norm": 0.5569620253164557, "acc_norm_stderr": 0.032335327775334835 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.48878923766816146, "acc_stderr": 0.033549366530984746, "acc_norm": 0.48878923766816146, "acc_norm_stderr": 0.033549366530984746 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.42748091603053434, "acc_stderr": 0.04338920305792401, "acc_norm": 0.42748091603053434, "acc_norm_stderr": 0.04338920305792401 }, "harness|hendrycksTest-international_law|5": { "acc": 0.5702479338842975, "acc_stderr": 0.045190820213197716, "acc_norm": 0.5702479338842975, "acc_norm_stderr": 0.045190820213197716 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.49074074074074076, "acc_stderr": 0.04832853553437055, "acc_norm": 0.49074074074074076, "acc_norm_stderr": 0.04832853553437055 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.3558282208588957, "acc_stderr": 0.03761521380046734, "acc_norm": 0.3558282208588957, "acc_norm_stderr": 0.03761521380046734 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.24107142857142858, "acc_stderr": 0.04059867246952687, "acc_norm": 0.24107142857142858, "acc_norm_stderr": 0.04059867246952687 }, "harness|hendrycksTest-management|5": { "acc": 0.42718446601941745, "acc_stderr": 0.048979577377811695, "acc_norm": 0.42718446601941745, "acc_norm_stderr": 0.048979577377811695 }, "harness|hendrycksTest-marketing|5": { "acc": 0.5641025641025641, "acc_stderr": 0.032485775115784016, "acc_norm": 0.5641025641025641, "acc_norm_stderr": 0.032485775115784016 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.52, "acc_stderr": 0.050211673156867795, "acc_norm": 0.52, "acc_norm_stderr": 0.050211673156867795 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.5351213282247765, "acc_stderr": 0.017835798806290642, "acc_norm": 0.5351213282247765, "acc_norm_stderr": 0.017835798806290642 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.44508670520231214, "acc_stderr": 0.02675625512966377, "acc_norm": 0.44508670520231214, "acc_norm_stderr": 0.02675625512966377 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.23910614525139665, "acc_stderr": 0.014265554192331144, "acc_norm": 0.23910614525139665, "acc_norm_stderr": 0.014265554192331144 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.43137254901960786, "acc_stderr": 0.02835895631342355, "acc_norm": 0.43137254901960786, "acc_norm_stderr": 0.02835895631342355 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.45980707395498394, "acc_stderr": 0.028306190403305696, "acc_norm": 0.45980707395498394, "acc_norm_stderr": 0.028306190403305696 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.4660493827160494, "acc_stderr": 0.027756535257347666, "acc_norm": 0.4660493827160494, "acc_norm_stderr": 
0.027756535257347666 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.31560283687943264, "acc_stderr": 0.027724989449509317, "acc_norm": 0.31560283687943264, "acc_norm_stderr": 0.027724989449509317 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.31421121251629724, "acc_stderr": 0.011855911587048226, "acc_norm": 0.31421121251629724, "acc_norm_stderr": 0.011855911587048226 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.4411764705882353, "acc_stderr": 0.03016191193076711, "acc_norm": 0.4411764705882353, "acc_norm_stderr": 0.03016191193076711 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.39052287581699346, "acc_stderr": 0.019737008998094597, "acc_norm": 0.39052287581699346, "acc_norm_stderr": 0.019737008998094597 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.45454545454545453, "acc_stderr": 0.04769300568972743, "acc_norm": 0.45454545454545453, "acc_norm_stderr": 0.04769300568972743 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.47346938775510206, "acc_stderr": 0.03196412734523272, "acc_norm": 0.47346938775510206, "acc_norm_stderr": 0.03196412734523272 }, "harness|hendrycksTest-sociology|5": { "acc": 0.5572139303482587, "acc_stderr": 0.03512310964123937, "acc_norm": 0.5572139303482587, "acc_norm_stderr": 0.03512310964123937 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.6, "acc_stderr": 0.049236596391733084, "acc_norm": 0.6, "acc_norm_stderr": 0.049236596391733084 }, "harness|hendrycksTest-virology|5": { "acc": 0.39759036144578314, "acc_stderr": 0.03809973084540218, "acc_norm": 0.39759036144578314, "acc_norm_stderr": 0.03809973084540218 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.5847953216374269, "acc_stderr": 0.03779275945503201, "acc_norm": 0.5847953216374269, "acc_norm_stderr": 0.03779275945503201 }, "harness|truthfulqa:mc|0": { "mc1": 0.23623011015911874, "mc1_stderr": 0.014869755015871114, "mc2": 0.37462221164216, "mc2_stderr": 0.014208268852646139 }, "harness|winogrande|5": { "acc": 0.712707182320442, "acc_stderr": 0.012717481052478035 }, "harness|gsm8k|5": { "acc": 0.05913570887035633, "acc_stderr": 0.006497266660428833 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. 
news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
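The aggregated metrics reported in the results block above are also stored in the dedicated "results" configuration of this details dataset, with a "latest" split pointing at the most recent run. A minimal sketch of reading them back — assuming the `datasets` library is installed; the repository id, configuration name, and split name are taken from the repository metadata below:

```python
from datasets import load_dataset

# Aggregated per-task metrics for the most recent evaluation run.
# "results" is the aggregate configuration; "latest" always points to the
# newest results file, while timestamp-named splits pin a specific run.
results = load_dataset(
    "open-llm-leaderboard/details_sarvamai__OpenHathi-7B-Hi-v0.1-Base",
    "results",
    split="latest",
)

print(results)     # dataset summary (number of rows, columns)
print(results[0])  # first row of the stored record for the latest run
```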
open-llm-leaderboard/details_sarvamai__OpenHathi-7B-Hi-v0.1-Base
[ "region:us" ]
2023-12-16T16:06:07+00:00
{"pretty_name": "Evaluation run of sarvamai/OpenHathi-7B-Hi-v0.1-Base", "dataset_summary": "Dataset automatically created during the evaluation run of model [sarvamai/OpenHathi-7B-Hi-v0.1-Base](https://huggingface.co/sarvamai/OpenHathi-7B-Hi-v0.1-Base) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_sarvamai__OpenHathi-7B-Hi-v0.1-Base\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-12-16T16:03:14.382672](https://huggingface.co/datasets/open-llm-leaderboard/details_sarvamai__OpenHathi-7B-Hi-v0.1-Base/blob/main/results_2023-12-16T16-03-14.382672.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.4158144377668671,\n \"acc_stderr\": 0.03431583720305929,\n \"acc_norm\": 0.42077773987307815,\n \"acc_norm_stderr\": 0.03514420382029662,\n \"mc1\": 0.23623011015911874,\n \"mc1_stderr\": 0.014869755015871114,\n \"mc2\": 0.37462221164216,\n \"mc2_stderr\": 0.014208268852646139\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.4539249146757679,\n \"acc_stderr\": 0.014549221105171872,\n \"acc_norm\": 0.4948805460750853,\n \"acc_norm_stderr\": 0.01461062489030916\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.5512846046604262,\n \"acc_stderr\": 0.00496346465774724,\n \"acc_norm\": 0.7433778131846246,\n \"acc_norm_stderr\": 0.004358764596401024\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.31,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.31,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.42962962962962964,\n \"acc_stderr\": 0.04276349494376599,\n \"acc_norm\": 0.42962962962962964,\n \"acc_norm_stderr\": 0.04276349494376599\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.40131578947368424,\n \"acc_stderr\": 0.039889037033362836,\n \"acc_norm\": 0.40131578947368424,\n \"acc_norm_stderr\": 0.039889037033362836\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.49,\n \"acc_stderr\": 0.05024183937956912,\n \"acc_norm\": 0.49,\n \"acc_norm_stderr\": 0.05024183937956912\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.38113207547169814,\n \"acc_stderr\": 0.02989060968628664,\n \"acc_norm\": 0.38113207547169814,\n \"acc_norm_stderr\": 0.02989060968628664\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.4097222222222222,\n \"acc_stderr\": 0.04112490974670787,\n \"acc_norm\": 0.4097222222222222,\n \"acc_norm_stderr\": 0.04112490974670787\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.33,\n \"acc_stderr\": 
0.047258156262526045,\n \"acc_norm\": 0.33,\n \"acc_norm_stderr\": 0.047258156262526045\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.39,\n \"acc_stderr\": 0.04902071300001974,\n \"acc_norm\": 0.39,\n \"acc_norm_stderr\": 0.04902071300001974\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.31,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.31,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.3179190751445087,\n \"acc_stderr\": 0.0355068398916558,\n \"acc_norm\": 0.3179190751445087,\n \"acc_norm_stderr\": 0.0355068398916558\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.24509803921568626,\n \"acc_stderr\": 0.042801058373643966,\n \"acc_norm\": 0.24509803921568626,\n \"acc_norm_stderr\": 0.042801058373643966\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.56,\n \"acc_stderr\": 0.04988876515698589,\n \"acc_norm\": 0.56,\n \"acc_norm_stderr\": 0.04988876515698589\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.34893617021276596,\n \"acc_stderr\": 0.031158522131357783,\n \"acc_norm\": 0.34893617021276596,\n \"acc_norm_stderr\": 0.031158522131357783\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.19298245614035087,\n \"acc_stderr\": 0.03712454853721368,\n \"acc_norm\": 0.19298245614035087,\n \"acc_norm_stderr\": 0.03712454853721368\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.4482758620689655,\n \"acc_stderr\": 0.04144311810878151,\n \"acc_norm\": 0.4482758620689655,\n \"acc_norm_stderr\": 0.04144311810878151\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.2671957671957672,\n \"acc_stderr\": 0.022789673145776564,\n \"acc_norm\": 0.2671957671957672,\n \"acc_norm_stderr\": 0.022789673145776564\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.3412698412698413,\n \"acc_stderr\": 0.04240799327574925,\n \"acc_norm\": 0.3412698412698413,\n \"acc_norm_stderr\": 0.04240799327574925\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.36,\n \"acc_stderr\": 0.04824181513244218,\n \"acc_norm\": 0.36,\n \"acc_norm_stderr\": 0.04824181513244218\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.44516129032258067,\n \"acc_stderr\": 0.028272410186214906,\n \"acc_norm\": 0.44516129032258067,\n \"acc_norm_stderr\": 0.028272410186214906\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.22660098522167488,\n \"acc_stderr\": 0.029454863835292965,\n \"acc_norm\": 0.22660098522167488,\n \"acc_norm_stderr\": 0.029454863835292965\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.44,\n \"acc_stderr\": 0.04988876515698589,\n \"acc_norm\": 0.44,\n \"acc_norm_stderr\": 0.04988876515698589\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.5636363636363636,\n \"acc_stderr\": 0.03872592983524754,\n \"acc_norm\": 0.5636363636363636,\n \"acc_norm_stderr\": 0.03872592983524754\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.46464646464646464,\n \"acc_stderr\": 0.035534363688280626,\n \"acc_norm\": 0.46464646464646464,\n \"acc_norm_stderr\": 0.035534363688280626\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.5751295336787565,\n \"acc_stderr\": 0.03567471335212539,\n \"acc_norm\": 0.5751295336787565,\n \"acc_norm_stderr\": 0.03567471335212539\n },\n 
\"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.37948717948717947,\n \"acc_stderr\": 0.024603626924097417,\n \"acc_norm\": 0.37948717948717947,\n \"acc_norm_stderr\": 0.024603626924097417\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.27037037037037037,\n \"acc_stderr\": 0.027080372815145665,\n \"acc_norm\": 0.27037037037037037,\n \"acc_norm_stderr\": 0.027080372815145665\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.35714285714285715,\n \"acc_stderr\": 0.031124619309328177,\n \"acc_norm\": 0.35714285714285715,\n \"acc_norm_stderr\": 0.031124619309328177\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.2847682119205298,\n \"acc_stderr\": 0.03684881521389023,\n \"acc_norm\": 0.2847682119205298,\n \"acc_norm_stderr\": 0.03684881521389023\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.45871559633027525,\n \"acc_stderr\": 0.0213641225338817,\n \"acc_norm\": 0.45871559633027525,\n \"acc_norm_stderr\": 0.0213641225338817\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.2962962962962963,\n \"acc_stderr\": 0.03114144782353603,\n \"acc_norm\": 0.2962962962962963,\n \"acc_norm_stderr\": 0.03114144782353603\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.5441176470588235,\n \"acc_stderr\": 0.03495624522015476,\n \"acc_norm\": 0.5441176470588235,\n \"acc_norm_stderr\": 0.03495624522015476\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.5569620253164557,\n \"acc_stderr\": 0.032335327775334835,\n \"acc_norm\": 0.5569620253164557,\n \"acc_norm_stderr\": 0.032335327775334835\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.48878923766816146,\n \"acc_stderr\": 0.033549366530984746,\n \"acc_norm\": 0.48878923766816146,\n \"acc_norm_stderr\": 0.033549366530984746\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.42748091603053434,\n \"acc_stderr\": 0.04338920305792401,\n \"acc_norm\": 0.42748091603053434,\n \"acc_norm_stderr\": 0.04338920305792401\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.5702479338842975,\n \"acc_stderr\": 0.045190820213197716,\n \"acc_norm\": 0.5702479338842975,\n \"acc_norm_stderr\": 0.045190820213197716\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.49074074074074076,\n \"acc_stderr\": 0.04832853553437055,\n \"acc_norm\": 0.49074074074074076,\n \"acc_norm_stderr\": 0.04832853553437055\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.3558282208588957,\n \"acc_stderr\": 0.03761521380046734,\n \"acc_norm\": 0.3558282208588957,\n \"acc_norm_stderr\": 0.03761521380046734\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.24107142857142858,\n \"acc_stderr\": 0.04059867246952687,\n \"acc_norm\": 0.24107142857142858,\n \"acc_norm_stderr\": 0.04059867246952687\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.42718446601941745,\n \"acc_stderr\": 0.048979577377811695,\n \"acc_norm\": 0.42718446601941745,\n \"acc_norm_stderr\": 0.048979577377811695\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.5641025641025641,\n \"acc_stderr\": 0.032485775115784016,\n \"acc_norm\": 0.5641025641025641,\n \"acc_norm_stderr\": 0.032485775115784016\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.52,\n \"acc_stderr\": 0.050211673156867795,\n \"acc_norm\": 0.52,\n \"acc_norm_stderr\": 0.050211673156867795\n },\n \"harness|hendrycksTest-miscellaneous|5\": 
{\n \"acc\": 0.5351213282247765,\n \"acc_stderr\": 0.017835798806290642,\n \"acc_norm\": 0.5351213282247765,\n \"acc_norm_stderr\": 0.017835798806290642\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.44508670520231214,\n \"acc_stderr\": 0.02675625512966377,\n \"acc_norm\": 0.44508670520231214,\n \"acc_norm_stderr\": 0.02675625512966377\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.23910614525139665,\n \"acc_stderr\": 0.014265554192331144,\n \"acc_norm\": 0.23910614525139665,\n \"acc_norm_stderr\": 0.014265554192331144\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.43137254901960786,\n \"acc_stderr\": 0.02835895631342355,\n \"acc_norm\": 0.43137254901960786,\n \"acc_norm_stderr\": 0.02835895631342355\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.45980707395498394,\n \"acc_stderr\": 0.028306190403305696,\n \"acc_norm\": 0.45980707395498394,\n \"acc_norm_stderr\": 0.028306190403305696\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.4660493827160494,\n \"acc_stderr\": 0.027756535257347666,\n \"acc_norm\": 0.4660493827160494,\n \"acc_norm_stderr\": 0.027756535257347666\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.31560283687943264,\n \"acc_stderr\": 0.027724989449509317,\n \"acc_norm\": 0.31560283687943264,\n \"acc_norm_stderr\": 0.027724989449509317\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.31421121251629724,\n \"acc_stderr\": 0.011855911587048226,\n \"acc_norm\": 0.31421121251629724,\n \"acc_norm_stderr\": 0.011855911587048226\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.4411764705882353,\n \"acc_stderr\": 0.03016191193076711,\n \"acc_norm\": 0.4411764705882353,\n \"acc_norm_stderr\": 0.03016191193076711\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.39052287581699346,\n \"acc_stderr\": 0.019737008998094597,\n \"acc_norm\": 0.39052287581699346,\n \"acc_norm_stderr\": 0.019737008998094597\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.45454545454545453,\n \"acc_stderr\": 0.04769300568972743,\n \"acc_norm\": 0.45454545454545453,\n \"acc_norm_stderr\": 0.04769300568972743\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.47346938775510206,\n \"acc_stderr\": 0.03196412734523272,\n \"acc_norm\": 0.47346938775510206,\n \"acc_norm_stderr\": 0.03196412734523272\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.5572139303482587,\n \"acc_stderr\": 0.03512310964123937,\n \"acc_norm\": 0.5572139303482587,\n \"acc_norm_stderr\": 0.03512310964123937\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.6,\n \"acc_stderr\": 0.049236596391733084,\n \"acc_norm\": 0.6,\n \"acc_norm_stderr\": 0.049236596391733084\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.39759036144578314,\n \"acc_stderr\": 0.03809973084540218,\n \"acc_norm\": 0.39759036144578314,\n \"acc_norm_stderr\": 0.03809973084540218\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.5847953216374269,\n \"acc_stderr\": 0.03779275945503201,\n \"acc_norm\": 0.5847953216374269,\n \"acc_norm_stderr\": 0.03779275945503201\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.23623011015911874,\n \"mc1_stderr\": 0.014869755015871114,\n \"mc2\": 0.37462221164216,\n \"mc2_stderr\": 0.014208268852646139\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.712707182320442,\n \"acc_stderr\": 0.012717481052478035\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.05913570887035633,\n 
\"acc_stderr\": 0.006497266660428833\n }\n}\n```", "repo_url": "https://huggingface.co/sarvamai/OpenHathi-7B-Hi-v0.1-Base", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_12_16T16_03_14.382672", "path": ["**/details_harness|arc:challenge|25_2023-12-16T16-03-14.382672.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-12-16T16-03-14.382672.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_12_16T16_03_14.382672", "path": ["**/details_harness|gsm8k|5_2023-12-16T16-03-14.382672.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-12-16T16-03-14.382672.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_12_16T16_03_14.382672", "path": ["**/details_harness|hellaswag|10_2023-12-16T16-03-14.382672.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-12-16T16-03-14.382672.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_12_16T16_03_14.382672", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T16-03-14.382672.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-12-16T16-03-14.382672.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-12-16T16-03-14.382672.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T16-03-14.382672.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T16-03-14.382672.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-12-16T16-03-14.382672.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T16-03-14.382672.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T16-03-14.382672.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T16-03-14.382672.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T16-03-14.382672.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-12-16T16-03-14.382672.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-12-16T16-03-14.382672.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T16-03-14.382672.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-12-16T16-03-14.382672.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T16-03-14.382672.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T16-03-14.382672.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T16-03-14.382672.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-12-16T16-03-14.382672.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T16-03-14.382672.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T16-03-14.382672.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T16-03-14.382672.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T16-03-14.382672.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T16-03-14.382672.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T16-03-14.382672.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T16-03-14.382672.parquet", 
"**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T16-03-14.382672.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T16-03-14.382672.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T16-03-14.382672.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T16-03-14.382672.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T16-03-14.382672.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T16-03-14.382672.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T16-03-14.382672.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-12-16T16-03-14.382672.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T16-03-14.382672.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-12-16T16-03-14.382672.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T16-03-14.382672.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T16-03-14.382672.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T16-03-14.382672.parquet", "**/details_harness|hendrycksTest-management|5_2023-12-16T16-03-14.382672.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-12-16T16-03-14.382672.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T16-03-14.382672.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T16-03-14.382672.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T16-03-14.382672.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T16-03-14.382672.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-12-16T16-03-14.382672.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-12-16T16-03-14.382672.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-12-16T16-03-14.382672.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T16-03-14.382672.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-12-16T16-03-14.382672.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T16-03-14.382672.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T16-03-14.382672.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-12-16T16-03-14.382672.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-12-16T16-03-14.382672.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-12-16T16-03-14.382672.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T16-03-14.382672.parquet", "**/details_harness|hendrycksTest-virology|5_2023-12-16T16-03-14.382672.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-12-16T16-03-14.382672.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T16-03-14.382672.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-12-16T16-03-14.382672.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-12-16T16-03-14.382672.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T16-03-14.382672.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T16-03-14.382672.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-12-16T16-03-14.382672.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T16-03-14.382672.parquet", 
"**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T16-03-14.382672.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T16-03-14.382672.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T16-03-14.382672.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-12-16T16-03-14.382672.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-12-16T16-03-14.382672.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T16-03-14.382672.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-12-16T16-03-14.382672.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T16-03-14.382672.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T16-03-14.382672.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T16-03-14.382672.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-12-16T16-03-14.382672.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T16-03-14.382672.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T16-03-14.382672.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T16-03-14.382672.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T16-03-14.382672.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T16-03-14.382672.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T16-03-14.382672.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T16-03-14.382672.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T16-03-14.382672.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T16-03-14.382672.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T16-03-14.382672.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T16-03-14.382672.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T16-03-14.382672.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T16-03-14.382672.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T16-03-14.382672.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-12-16T16-03-14.382672.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T16-03-14.382672.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-12-16T16-03-14.382672.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T16-03-14.382672.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T16-03-14.382672.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T16-03-14.382672.parquet", "**/details_harness|hendrycksTest-management|5_2023-12-16T16-03-14.382672.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-12-16T16-03-14.382672.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T16-03-14.382672.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T16-03-14.382672.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T16-03-14.382672.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T16-03-14.382672.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-12-16T16-03-14.382672.parquet", 
"**/details_harness|hendrycksTest-philosophy|5_2023-12-16T16-03-14.382672.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-12-16T16-03-14.382672.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T16-03-14.382672.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-12-16T16-03-14.382672.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T16-03-14.382672.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T16-03-14.382672.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-12-16T16-03-14.382672.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-12-16T16-03-14.382672.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-12-16T16-03-14.382672.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T16-03-14.382672.parquet", "**/details_harness|hendrycksTest-virology|5_2023-12-16T16-03-14.382672.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-12-16T16-03-14.382672.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_12_16T16_03_14.382672", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T16-03-14.382672.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T16-03-14.382672.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_12_16T16_03_14.382672", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-12-16T16-03-14.382672.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-12-16T16-03-14.382672.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_12_16T16_03_14.382672", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-12-16T16-03-14.382672.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-12-16T16-03-14.382672.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_12_16T16_03_14.382672", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T16-03-14.382672.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T16-03-14.382672.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_12_16T16_03_14.382672", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T16-03-14.382672.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T16-03-14.382672.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_12_16T16_03_14.382672", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-12-16T16-03-14.382672.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-12-16T16-03-14.382672.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_12_16T16_03_14.382672", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T16-03-14.382672.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T16-03-14.382672.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_12_16T16_03_14.382672", "path": 
["**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T16-03-14.382672.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T16-03-14.382672.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_12_16T16_03_14.382672", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T16-03-14.382672.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T16-03-14.382672.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_12_16T16_03_14.382672", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T16-03-14.382672.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T16-03-14.382672.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_12_16T16_03_14.382672", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-12-16T16-03-14.382672.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-12-16T16-03-14.382672.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_12_16T16_03_14.382672", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-12-16T16-03-14.382672.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-12-16T16-03-14.382672.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_12_16T16_03_14.382672", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T16-03-14.382672.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T16-03-14.382672.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_12_16T16_03_14.382672", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-12-16T16-03-14.382672.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-12-16T16-03-14.382672.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_12_16T16_03_14.382672", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T16-03-14.382672.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T16-03-14.382672.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_12_16T16_03_14.382672", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T16-03-14.382672.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T16-03-14.382672.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_12_16T16_03_14.382672", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T16-03-14.382672.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T16-03-14.382672.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_12_16T16_03_14.382672", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-12-16T16-03-14.382672.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-global_facts|5_2023-12-16T16-03-14.382672.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_12_16T16_03_14.382672", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T16-03-14.382672.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T16-03-14.382672.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_12_16T16_03_14.382672", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T16-03-14.382672.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T16-03-14.382672.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_12_16T16_03_14.382672", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T16-03-14.382672.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T16-03-14.382672.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_12_16T16_03_14.382672", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T16-03-14.382672.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T16-03-14.382672.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_12_16T16_03_14.382672", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T16-03-14.382672.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T16-03-14.382672.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_12_16T16_03_14.382672", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T16-03-14.382672.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T16-03-14.382672.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_12_16T16_03_14.382672", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T16-03-14.382672.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T16-03-14.382672.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_12_16T16_03_14.382672", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T16-03-14.382672.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T16-03-14.382672.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_12_16T16_03_14.382672", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T16-03-14.382672.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T16-03-14.382672.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_12_16T16_03_14.382672", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T16-03-14.382672.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T16-03-14.382672.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_12_16T16_03_14.382672", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T16-03-14.382672.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T16-03-14.382672.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_12_16T16_03_14.382672", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T16-03-14.382672.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T16-03-14.382672.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_12_16T16_03_14.382672", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T16-03-14.382672.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T16-03-14.382672.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_12_16T16_03_14.382672", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T16-03-14.382672.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T16-03-14.382672.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_12_16T16_03_14.382672", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-12-16T16-03-14.382672.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-12-16T16-03-14.382672.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_12_16T16_03_14.382672", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T16-03-14.382672.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T16-03-14.382672.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_12_16T16_03_14.382672", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-12-16T16-03-14.382672.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-12-16T16-03-14.382672.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_12_16T16_03_14.382672", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T16-03-14.382672.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T16-03-14.382672.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_12_16T16_03_14.382672", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T16-03-14.382672.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T16-03-14.382672.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_12_16T16_03_14.382672", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T16-03-14.382672.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T16-03-14.382672.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_12_16T16_03_14.382672", "path": ["**/details_harness|hendrycksTest-management|5_2023-12-16T16-03-14.382672.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-12-16T16-03-14.382672.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_12_16T16_03_14.382672", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-12-16T16-03-14.382672.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-12-16T16-03-14.382672.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_12_16T16_03_14.382672", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T16-03-14.382672.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T16-03-14.382672.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_12_16T16_03_14.382672", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T16-03-14.382672.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T16-03-14.382672.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_12_16T16_03_14.382672", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T16-03-14.382672.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T16-03-14.382672.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_12_16T16_03_14.382672", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T16-03-14.382672.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T16-03-14.382672.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_12_16T16_03_14.382672", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-12-16T16-03-14.382672.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-12-16T16-03-14.382672.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_12_16T16_03_14.382672", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-12-16T16-03-14.382672.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-12-16T16-03-14.382672.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_12_16T16_03_14.382672", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-12-16T16-03-14.382672.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-12-16T16-03-14.382672.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_12_16T16_03_14.382672", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T16-03-14.382672.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T16-03-14.382672.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_12_16T16_03_14.382672", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2023-12-16T16-03-14.382672.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-12-16T16-03-14.382672.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_12_16T16_03_14.382672", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T16-03-14.382672.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T16-03-14.382672.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_12_16T16_03_14.382672", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T16-03-14.382672.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T16-03-14.382672.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_12_16T16_03_14.382672", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-12-16T16-03-14.382672.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-12-16T16-03-14.382672.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_12_16T16_03_14.382672", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-12-16T16-03-14.382672.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-12-16T16-03-14.382672.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_12_16T16_03_14.382672", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-12-16T16-03-14.382672.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-12-16T16-03-14.382672.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_12_16T16_03_14.382672", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T16-03-14.382672.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T16-03-14.382672.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_12_16T16_03_14.382672", "path": ["**/details_harness|hendrycksTest-virology|5_2023-12-16T16-03-14.382672.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-12-16T16-03-14.382672.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_12_16T16_03_14.382672", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-12-16T16-03-14.382672.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-12-16T16-03-14.382672.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_12_16T16_03_14.382672", "path": ["**/details_harness|truthfulqa:mc|0_2023-12-16T16-03-14.382672.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-12-16T16-03-14.382672.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_12_16T16_03_14.382672", "path": ["**/details_harness|winogrande|5_2023-12-16T16-03-14.382672.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-12-16T16-03-14.382672.parquet"]}]}, {"config_name": "results", "data_files": [{"split": 
"2023_12_16T16_03_14.382672", "path": ["results_2023-12-16T16-03-14.382672.parquet"]}, {"split": "latest", "path": ["results_2023-12-16T16-03-14.382672.parquet"]}]}]}
2023-12-16T16:06:50+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of sarvamai/OpenHathi-7B-Hi-v0.1-Base Dataset automatically created during the evaluation run of model sarvamai/OpenHathi-7B-Hi-v0.1-Base on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-12-16T16:03:14.382672 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
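The loading example referenced in the text above ("To load the details from a run, you can for instance do the following") corresponds to the snippet given in the dataset summary in the metadata; for reference, a copy of it:

```python
from datasets import load_dataset

# "train" always points to the latest results for this configuration.
data = load_dataset(
    "open-llm-leaderboard/details_sarvamai__OpenHathi-7B-Hi-v0.1-Base",
    "harness_winogrande_5",
    split="train",
)
```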
[ "# Dataset Card for Evaluation run of sarvamai/OpenHathi-7B-Hi-v0.1-Base\n\n\n\nDataset automatically created during the evaluation run of model sarvamai/OpenHathi-7B-Hi-v0.1-Base on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-12-16T16:03:14.382672(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of sarvamai/OpenHathi-7B-Hi-v0.1-Base\n\n\n\nDataset automatically created during the evaluation run of model sarvamai/OpenHathi-7B-Hi-v0.1-Base on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-12-16T16:03:14.382672(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 193, 67, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of sarvamai/OpenHathi-7B-Hi-v0.1-Base\n\n\n\nDataset automatically created during the evaluation run of model sarvamai/OpenHathi-7B-Hi-v0.1-Base on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-12-16T16:03:14.382672(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]## Dataset Card Authors [optional]" ]
15e8a4795629d320f9ff09eb1bc8a2c1b852aad3
# Dataset Card for Evaluation run of jan-ai/Pandora-13B-v1

<!-- Provide a quick summary of the dataset. -->

Dataset automatically created during the evaluation run of model [jan-ai/Pandora-13B-v1](https://huggingface.co/jan-ai/Pandora-13B-v1) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).

The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).

To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_jan-ai__Pandora-13B-v1",
	"harness_winogrande_5",
	split="train")
```

## Latest results

These are the [latest results from run 2023-12-16T16:04:27.777875](https://huggingface.co/datasets/open-llm-leaderboard/details_jan-ai__Pandora-13B-v1/blob/main/results_2023-12-16T16-04-27.777875.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks; you can find each one in the results and the "latest" split for each eval):

```python
{
    "all": {
        "acc": 0.6387960101737593,
        "acc_stderr": 0.03229179576508163,
        "acc_norm": 0.6410948778638178,
        "acc_norm_stderr": 0.03293998131620465,
        "mc1": 0.5275397796817626,
        "mc1_stderr": 0.01747693019071219,
        "mc2": 0.6576967145029661,
        "mc2_stderr": 0.015600354210172791
    },
    "harness|arc:challenge|25": {
        "acc": 0.6518771331058021,
        "acc_stderr": 0.013921008595179347,
        "acc_norm": 0.6706484641638225,
        "acc_norm_stderr": 0.013734057652635476
    },
    "harness|hellaswag|10": {
        "acc": 0.7010555666201952,
        "acc_stderr": 0.004568598290799867,
        "acc_norm": 0.8753236407090221,
        "acc_norm_stderr": 0.0032967643208219296
    },
    "harness|hendrycksTest-abstract_algebra|5": {
        "acc": 0.27,
        "acc_stderr": 0.044619604333847394,
        "acc_norm": 0.27,
        "acc_norm_stderr": 0.044619604333847394
    },
    "harness|hendrycksTest-anatomy|5": {
        "acc": 0.6222222222222222,
        "acc_stderr": 0.04188307537595852,
        "acc_norm": 0.6222222222222222,
        "acc_norm_stderr": 0.04188307537595852
    },
    "harness|hendrycksTest-astronomy|5": {
        "acc": 0.6776315789473685,
        "acc_stderr": 0.03803510248351585,
        "acc_norm": 0.6776315789473685,
        "acc_norm_stderr": 0.03803510248351585
    },
    "harness|hendrycksTest-business_ethics|5": {
        "acc": 0.63,
        "acc_stderr": 0.04852365870939099,
        "acc_norm": 0.63,
        "acc_norm_stderr": 0.04852365870939099
    },
    "harness|hendrycksTest-clinical_knowledge|5": {
        "acc": 0.6679245283018868,
        "acc_stderr": 0.02898545565233439,
        "acc_norm": 0.6679245283018868,
        "acc_norm_stderr": 0.02898545565233439
    },
    "harness|hendrycksTest-college_biology|5": {
        "acc": 0.7083333333333334,
        "acc_stderr": 0.03800968060554859,
        "acc_norm": 0.7083333333333334,
        "acc_norm_stderr": 0.03800968060554859
    },
    "harness|hendrycksTest-college_chemistry|5": {
        "acc": 0.36,
        "acc_stderr": 0.04824181513244218,
        "acc_norm": 0.36,
        "acc_norm_stderr": 0.04824181513244218
    },
    "harness|hendrycksTest-college_computer_science|5": {
        "acc": 0.53,
        "acc_stderr": 0.050161355804659205,
        "acc_norm": 0.53,
        "acc_norm_stderr": 0.050161355804659205
    },
    "harness|hendrycksTest-college_mathematics|5": {
        "acc": 0.32,
        "acc_stderr": 0.04688261722621504,
        "acc_norm": 0.32,
        "acc_norm_stderr": 0.04688261722621504
    },
    "harness|hendrycksTest-college_medicine|5": {
        "acc": 0.6705202312138728,
        "acc_stderr": 0.03583901754736412,
        "acc_norm": 0.6705202312138728,
        "acc_norm_stderr": 0.03583901754736412
    },
    "harness|hendrycksTest-college_physics|5": {
        "acc": 0.4019607843137255,
        "acc_stderr": 0.04878608714466996,
        "acc_norm": 0.4019607843137255,
        "acc_norm_stderr": 0.04878608714466996
    },
    "harness|hendrycksTest-computer_security|5": {
        "acc": 0.76,
        "acc_stderr": 0.042923469599092816,
        "acc_norm": 0.76,
        "acc_norm_stderr": 0.042923469599092816
    },
    "harness|hendrycksTest-conceptual_physics|5": {
        "acc": 0.5702127659574469,
        "acc_stderr": 0.03236214467715564,
        "acc_norm": 0.5702127659574469,
        "acc_norm_stderr": 0.03236214467715564
    },
    "harness|hendrycksTest-econometrics|5": {
        "acc": 0.4824561403508772,
        "acc_stderr": 0.04700708033551038,
        "acc_norm": 0.4824561403508772,
        "acc_norm_stderr": 0.04700708033551038
    },
    "harness|hendrycksTest-electrical_engineering|5": {
        "acc": 0.5172413793103449,
        "acc_stderr": 0.04164188720169375,
        "acc_norm": 0.5172413793103449,
        "acc_norm_stderr": 0.04164188720169375
    },
    "harness|hendrycksTest-elementary_mathematics|5": {
        "acc": 0.4417989417989418,
        "acc_stderr": 0.02557625706125383,
        "acc_norm": 0.4417989417989418,
        "acc_norm_stderr": 0.02557625706125383
    },
    "harness|hendrycksTest-formal_logic|5": {
        "acc": 0.49206349206349204,
        "acc_stderr": 0.044715725362943486,
        "acc_norm": 0.49206349206349204,
        "acc_norm_stderr": 0.044715725362943486
    },
    "harness|hendrycksTest-global_facts|5": {
        "acc": 0.42,
        "acc_stderr": 0.04960449637488584,
        "acc_norm": 0.42,
        "acc_norm_stderr": 0.04960449637488584
    },
    "harness|hendrycksTest-high_school_biology|5": {
        "acc": 0.7612903225806451,
        "acc_stderr": 0.02425107126220884,
        "acc_norm": 0.7612903225806451,
        "acc_norm_stderr": 0.02425107126220884
    },
    "harness|hendrycksTest-high_school_chemistry|5": {
        "acc": 0.46798029556650245,
        "acc_stderr": 0.03510766597959215,
        "acc_norm": 0.46798029556650245,
        "acc_norm_stderr": 0.03510766597959215
    },
    "harness|hendrycksTest-high_school_computer_science|5": {
        "acc": 0.7,
        "acc_stderr": 0.046056618647183814,
        "acc_norm": 0.7,
        "acc_norm_stderr": 0.046056618647183814
    },
    "harness|hendrycksTest-high_school_european_history|5": {
        "acc": 0.7818181818181819,
        "acc_stderr": 0.03225078108306289,
        "acc_norm": 0.7818181818181819,
        "acc_norm_stderr": 0.03225078108306289
    },
    "harness|hendrycksTest-high_school_geography|5": {
        "acc": 0.8080808080808081,
        "acc_stderr": 0.02805779167298902,
        "acc_norm": 0.8080808080808081,
        "acc_norm_stderr": 0.02805779167298902
    },
    "harness|hendrycksTest-high_school_government_and_politics|5": {
        "acc": 0.8756476683937824,
        "acc_stderr": 0.023814477086593535,
        "acc_norm": 0.8756476683937824,
        "acc_norm_stderr": 0.023814477086593535
    },
    "harness|hendrycksTest-high_school_macroeconomics|5": {
        "acc": 0.6743589743589744,
        "acc_stderr": 0.02375966576741229,
        "acc_norm": 0.6743589743589744,
        "acc_norm_stderr": 0.02375966576741229
    },
    "harness|hendrycksTest-high_school_mathematics|5": {
        "acc": 0.3074074074074074,
        "acc_stderr": 0.028133252578815632,
        "acc_norm": 0.3074074074074074,
        "acc_norm_stderr": 0.028133252578815632
    },
    "harness|hendrycksTest-high_school_microeconomics|5": {
        "acc": 0.680672268907563,
        "acc_stderr": 0.030283995525884396,
        "acc_norm": 0.680672268907563,
        "acc_norm_stderr": 0.030283995525884396
    },
    "harness|hendrycksTest-high_school_physics|5": {
        "acc": 0.36423841059602646,
        "acc_stderr": 0.039291117812427424,
        "acc_norm": 0.36423841059602646,
        "acc_norm_stderr": 0.039291117812427424
    },
    "harness|hendrycksTest-high_school_psychology|5": {
        "acc": 0.8495412844036697,
        "acc_stderr": 0.015328563932669235,
        "acc_norm": 0.8495412844036697,
        "acc_norm_stderr": 0.015328563932669235
    },
    "harness|hendrycksTest-high_school_statistics|5": {
        "acc": 0.5046296296296297,
        "acc_stderr": 0.03409825519163572,
        "acc_norm": 0.5046296296296297,
        "acc_norm_stderr": 0.03409825519163572
    },
    "harness|hendrycksTest-high_school_us_history|5": {
        "acc": 0.7794117647058824,
        "acc_stderr": 0.02910225438967408,
        "acc_norm": 0.7794117647058824,
        "acc_norm_stderr": 0.02910225438967408
    },
    "harness|hendrycksTest-high_school_world_history|5": {
        "acc": 0.7890295358649789,
        "acc_stderr": 0.026558372502661916,
        "acc_norm": 0.7890295358649789,
        "acc_norm_stderr": 0.026558372502661916
    },
    "harness|hendrycksTest-human_aging|5": {
        "acc": 0.7219730941704036,
        "acc_stderr": 0.030069584874494043,
        "acc_norm": 0.7219730941704036,
        "acc_norm_stderr": 0.030069584874494043
    },
    "harness|hendrycksTest-human_sexuality|5": {
        "acc": 0.7709923664122137,
        "acc_stderr": 0.036853466317118506,
        "acc_norm": 0.7709923664122137,
        "acc_norm_stderr": 0.036853466317118506
    },
    "harness|hendrycksTest-international_law|5": {
        "acc": 0.7768595041322314,
        "acc_stderr": 0.03800754475228733,
        "acc_norm": 0.7768595041322314,
        "acc_norm_stderr": 0.03800754475228733
    },
    "harness|hendrycksTest-jurisprudence|5": {
        "acc": 0.7407407407407407,
        "acc_stderr": 0.04236511258094632,
        "acc_norm": 0.7407407407407407,
        "acc_norm_stderr": 0.04236511258094632
    },
    "harness|hendrycksTest-logical_fallacies|5": {
        "acc": 0.7484662576687117,
        "acc_stderr": 0.03408997886857529,
        "acc_norm": 0.7484662576687117,
        "acc_norm_stderr": 0.03408997886857529
    },
    "harness|hendrycksTest-machine_learning|5": {
        "acc": 0.41964285714285715,
        "acc_stderr": 0.04684099321077106,
        "acc_norm": 0.41964285714285715,
        "acc_norm_stderr": 0.04684099321077106
    },
    "harness|hendrycksTest-management|5": {
        "acc": 0.8058252427184466,
        "acc_stderr": 0.039166677628225836,
        "acc_norm": 0.8058252427184466,
        "acc_norm_stderr": 0.039166677628225836
    },
    "harness|hendrycksTest-marketing|5": {
        "acc": 0.8803418803418803,
        "acc_stderr": 0.021262719400406964,
        "acc_norm": 0.8803418803418803,
        "acc_norm_stderr": 0.021262719400406964
    },
    "harness|hendrycksTest-medical_genetics|5": {
        "acc": 0.75,
        "acc_stderr": 0.04351941398892446,
        "acc_norm": 0.75,
        "acc_norm_stderr": 0.04351941398892446
    },
    "harness|hendrycksTest-miscellaneous|5": {
        "acc": 0.8109833971902938,
        "acc_stderr": 0.014000791294407006,
        "acc_norm": 0.8109833971902938,
        "acc_norm_stderr": 0.014000791294407006
    },
    "harness|hendrycksTest-moral_disputes|5": {
        "acc": 0.6820809248554913,
        "acc_stderr": 0.025070713719153186,
        "acc_norm": 0.6820809248554913,
        "acc_norm_stderr": 0.025070713719153186
    },
    "harness|hendrycksTest-moral_scenarios|5": {
        "acc": 0.37318435754189944,
        "acc_stderr": 0.01617569201338196,
        "acc_norm": 0.37318435754189944,
        "acc_norm_stderr": 0.01617569201338196
    },
    "harness|hendrycksTest-nutrition|5": {
        "acc": 0.7026143790849673,
        "acc_stderr": 0.02617390850671858,
        "acc_norm": 0.7026143790849673,
        "acc_norm_stderr": 0.02617390850671858
    },
    "harness|hendrycksTest-philosophy|5": {
        "acc": 0.707395498392283,
        "acc_stderr": 0.02583989833487798,
        "acc_norm": 0.707395498392283,
        "acc_norm_stderr": 0.02583989833487798
    },
    "harness|hendrycksTest-prehistory|5": {
        "acc": 0.7345679012345679,
        "acc_stderr": 0.024569223600460845,
        "acc_norm": 0.7345679012345679,
        "acc_norm_stderr": 0.024569223600460845
    },
    "harness|hendrycksTest-professional_accounting|5": {
        "acc": 0.5,
        "acc_stderr": 0.029827499313594685,
        "acc_norm": 0.5,
        "acc_norm_stderr": 0.029827499313594685
    },
    "harness|hendrycksTest-professional_law|5": {
        "acc": 0.4745762711864407,
        "acc_stderr": 0.012753716929101004,
        "acc_norm": 0.4745762711864407,
        "acc_norm_stderr": 0.012753716929101004
    },
    "harness|hendrycksTest-professional_medicine|5": {
        "acc": 0.6764705882352942,
        "acc_stderr": 0.02841820861940676,
        "acc_norm": 0.6764705882352942,
        "acc_norm_stderr": 0.02841820861940676
    },
    "harness|hendrycksTest-professional_psychology|5": {
        "acc": 0.6650326797385621,
        "acc_stderr": 0.019094228167000325,
        "acc_norm": 0.6650326797385621,
        "acc_norm_stderr": 0.019094228167000325
    },
    "harness|hendrycksTest-public_relations|5": {
        "acc": 0.6818181818181818,
        "acc_stderr": 0.044612721759105085,
        "acc_norm": 0.6818181818181818,
        "acc_norm_stderr": 0.044612721759105085
    },
    "harness|hendrycksTest-security_studies|5": {
        "acc": 0.7224489795918367,
        "acc_stderr": 0.028666857790274648,
        "acc_norm": 0.7224489795918367,
        "acc_norm_stderr": 0.028666857790274648
    },
    "harness|hendrycksTest-sociology|5": {
        "acc": 0.8606965174129353,
        "acc_stderr": 0.02448448716291397,
        "acc_norm": 0.8606965174129353,
        "acc_norm_stderr": 0.02448448716291397
    },
    "harness|hendrycksTest-us_foreign_policy|5": {
        "acc": 0.86,
        "acc_stderr": 0.0348735088019777,
        "acc_norm": 0.86,
        "acc_norm_stderr": 0.0348735088019777
    },
    "harness|hendrycksTest-virology|5": {
        "acc": 0.5301204819277109,
        "acc_stderr": 0.03885425420866767,
        "acc_norm": 0.5301204819277109,
        "acc_norm_stderr": 0.03885425420866767
    },
    "harness|hendrycksTest-world_religions|5": {
        "acc": 0.7953216374269005,
        "acc_stderr": 0.03094445977853321,
        "acc_norm": 0.7953216374269005,
        "acc_norm_stderr": 0.03094445977853321
    },
    "harness|truthfulqa:mc|0": {
        "mc1": 0.5275397796817626,
        "mc1_stderr": 0.01747693019071219,
        "mc2": 0.6576967145029661,
        "mc2_stderr": 0.015600354210172791
    },
    "harness|winogrande|5": {
        "acc": 0.8050513022888713,
        "acc_stderr": 0.011134099415938275
    },
    "harness|gsm8k|5": {
        "acc": 0.5299469294920395,
        "acc_stderr": 0.013747759685444703
    }
}
```

## Dataset Details

### Dataset Description

<!-- Provide a longer summary of what this dataset is. -->

- **Curated by:** [More Information Needed]
- **Funded by [optional]:** [More Information Needed]
- **Shared by [optional]:** [More Information Needed]
- **Language(s) (NLP):** [More Information Needed]
- **License:** [More Information Needed]

### Dataset Sources [optional]

<!-- Provide the basic links for the dataset. -->

- **Repository:** [More Information Needed]
- **Paper [optional]:** [More Information Needed]
- **Demo [optional]:** [More Information Needed]

## Uses

<!-- Address questions around how the dataset is intended to be used. -->

### Direct Use

<!-- This section describes suitable use cases for the dataset. -->

[More Information Needed]

### Out-of-Scope Use

<!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. -->

[More Information Needed]

## Dataset Structure

<!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. -->

[More Information Needed]

## Dataset Creation

### Curation Rationale

<!-- Motivation for the creation of this dataset. -->

[More Information Needed]

### Source Data

<!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). -->

#### Data Collection and Processing

<!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. -->

[More Information Needed]

#### Who are the source data producers?

<!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. -->

[More Information Needed]

### Annotations [optional]

<!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. -->

#### Annotation process

<!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. -->

[More Information Needed]

#### Who are the annotators?

<!-- This section describes the people or systems who created the annotations. -->

[More Information Needed]

#### Personal and Sensitive Information

<!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. -->

[More Information Needed]

## Bias, Risks, and Limitations

<!-- This section is meant to convey both technical and sociotechnical limitations. -->

[More Information Needed]

### Recommendations

<!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. -->

Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.

## Citation [optional]

<!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. -->

**BibTeX:**

[More Information Needed]

**APA:**

[More Information Needed]

## Glossary [optional]

<!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. -->

[More Information Needed]

## More Information [optional]

[More Information Needed]

## Dataset Card Authors [optional]

[More Information Needed]

## Dataset Card Contact

[More Information Needed]
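As noted at the top of this card, the aggregated metrics of a run live in the extra "results" configuration rather than in the per-task configurations. Below is a minimal sketch of reading them with the `datasets` library; it assumes the repository is public and reachable from your environment, and the printed fields are only illustrative of whatever the run stored.

```python
from datasets import load_dataset

# The "results" configuration holds the aggregated metrics of the run;
# its "latest" split points at the most recent results parquet file
# (here results_2023-12-16T16-04-27.777875.parquet).
results = load_dataset(
    "open-llm-leaderboard/details_jan-ai__Pandora-13B-v1",
    "results",
    split="latest",
)

print(results)      # number of rows and column names
print(results[0])   # raw aggregated record for the latest run
```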
open-llm-leaderboard/details_jan-ai__Pandora-13B-v1
[ "region:us" ]
2023-12-16T16:07:22+00:00
{"pretty_name": "Evaluation run of jan-ai/Pandora-13B-v1", "dataset_summary": "Dataset automatically created during the evaluation run of model [jan-ai/Pandora-13B-v1](https://huggingface.co/jan-ai/Pandora-13B-v1) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_jan-ai__Pandora-13B-v1\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-12-16T16:04:27.777875](https://huggingface.co/datasets/open-llm-leaderboard/details_jan-ai__Pandora-13B-v1/blob/main/results_2023-12-16T16-04-27.777875.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6387960101737593,\n \"acc_stderr\": 0.03229179576508163,\n \"acc_norm\": 0.6410948778638178,\n \"acc_norm_stderr\": 0.03293998131620465,\n \"mc1\": 0.5275397796817626,\n \"mc1_stderr\": 0.01747693019071219,\n \"mc2\": 0.6576967145029661,\n \"mc2_stderr\": 0.015600354210172791\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.6518771331058021,\n \"acc_stderr\": 0.013921008595179347,\n \"acc_norm\": 0.6706484641638225,\n \"acc_norm_stderr\": 0.013734057652635476\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.7010555666201952,\n \"acc_stderr\": 0.004568598290799867,\n \"acc_norm\": 0.8753236407090221,\n \"acc_norm_stderr\": 0.0032967643208219296\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.27,\n \"acc_stderr\": 0.044619604333847394,\n \"acc_norm\": 0.27,\n \"acc_norm_stderr\": 0.044619604333847394\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6222222222222222,\n \"acc_stderr\": 0.04188307537595852,\n \"acc_norm\": 0.6222222222222222,\n \"acc_norm_stderr\": 0.04188307537595852\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.6776315789473685,\n \"acc_stderr\": 0.03803510248351585,\n \"acc_norm\": 0.6776315789473685,\n \"acc_norm_stderr\": 0.03803510248351585\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.63,\n \"acc_stderr\": 0.04852365870939099,\n \"acc_norm\": 0.63,\n \"acc_norm_stderr\": 0.04852365870939099\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.6679245283018868,\n \"acc_stderr\": 0.02898545565233439,\n \"acc_norm\": 0.6679245283018868,\n \"acc_norm_stderr\": 0.02898545565233439\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7083333333333334,\n \"acc_stderr\": 0.03800968060554859,\n \"acc_norm\": 0.7083333333333334,\n \"acc_norm_stderr\": 0.03800968060554859\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.36,\n \"acc_stderr\": 0.04824181513244218,\n \"acc_norm\": 0.36,\n \"acc_norm_stderr\": 
0.04824181513244218\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.53,\n \"acc_stderr\": 0.050161355804659205,\n \"acc_norm\": 0.53,\n \"acc_norm_stderr\": 0.050161355804659205\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.32,\n \"acc_stderr\": 0.04688261722621504,\n \"acc_norm\": 0.32,\n \"acc_norm_stderr\": 0.04688261722621504\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6705202312138728,\n \"acc_stderr\": 0.03583901754736412,\n \"acc_norm\": 0.6705202312138728,\n \"acc_norm_stderr\": 0.03583901754736412\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.4019607843137255,\n \"acc_stderr\": 0.04878608714466996,\n \"acc_norm\": 0.4019607843137255,\n \"acc_norm_stderr\": 0.04878608714466996\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.76,\n \"acc_stderr\": 0.042923469599092816,\n \"acc_norm\": 0.76,\n \"acc_norm_stderr\": 0.042923469599092816\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.5702127659574469,\n \"acc_stderr\": 0.03236214467715564,\n \"acc_norm\": 0.5702127659574469,\n \"acc_norm_stderr\": 0.03236214467715564\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.4824561403508772,\n \"acc_stderr\": 0.04700708033551038,\n \"acc_norm\": 0.4824561403508772,\n \"acc_norm_stderr\": 0.04700708033551038\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5172413793103449,\n \"acc_stderr\": 0.04164188720169375,\n \"acc_norm\": 0.5172413793103449,\n \"acc_norm_stderr\": 0.04164188720169375\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.4417989417989418,\n \"acc_stderr\": 0.02557625706125383,\n \"acc_norm\": 0.4417989417989418,\n \"acc_norm_stderr\": 0.02557625706125383\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.49206349206349204,\n \"acc_stderr\": 0.044715725362943486,\n \"acc_norm\": 0.49206349206349204,\n \"acc_norm_stderr\": 0.044715725362943486\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.42,\n \"acc_stderr\": 0.04960449637488584,\n \"acc_norm\": 0.42,\n \"acc_norm_stderr\": 0.04960449637488584\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7612903225806451,\n \"acc_stderr\": 0.02425107126220884,\n \"acc_norm\": 0.7612903225806451,\n \"acc_norm_stderr\": 0.02425107126220884\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.46798029556650245,\n \"acc_stderr\": 0.03510766597959215,\n \"acc_norm\": 0.46798029556650245,\n \"acc_norm_stderr\": 0.03510766597959215\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.7,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.7,\n \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7818181818181819,\n \"acc_stderr\": 0.03225078108306289,\n \"acc_norm\": 0.7818181818181819,\n \"acc_norm_stderr\": 0.03225078108306289\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.8080808080808081,\n \"acc_stderr\": 0.02805779167298902,\n \"acc_norm\": 0.8080808080808081,\n \"acc_norm_stderr\": 0.02805779167298902\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.8756476683937824,\n \"acc_stderr\": 0.023814477086593535,\n \"acc_norm\": 0.8756476683937824,\n \"acc_norm_stderr\": 0.023814477086593535\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.6743589743589744,\n \"acc_stderr\": 
0.02375966576741229,\n \"acc_norm\": 0.6743589743589744,\n \"acc_norm_stderr\": 0.02375966576741229\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.3074074074074074,\n \"acc_stderr\": 0.028133252578815632,\n \"acc_norm\": 0.3074074074074074,\n \"acc_norm_stderr\": 0.028133252578815632\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.680672268907563,\n \"acc_stderr\": 0.030283995525884396,\n \"acc_norm\": 0.680672268907563,\n \"acc_norm_stderr\": 0.030283995525884396\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.36423841059602646,\n \"acc_stderr\": 0.039291117812427424,\n \"acc_norm\": 0.36423841059602646,\n \"acc_norm_stderr\": 0.039291117812427424\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8495412844036697,\n \"acc_stderr\": 0.015328563932669235,\n \"acc_norm\": 0.8495412844036697,\n \"acc_norm_stderr\": 0.015328563932669235\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.5046296296296297,\n \"acc_stderr\": 0.03409825519163572,\n \"acc_norm\": 0.5046296296296297,\n \"acc_norm_stderr\": 0.03409825519163572\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.7794117647058824,\n \"acc_stderr\": 0.02910225438967408,\n \"acc_norm\": 0.7794117647058824,\n \"acc_norm_stderr\": 0.02910225438967408\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.7890295358649789,\n \"acc_stderr\": 0.026558372502661916,\n \"acc_norm\": 0.7890295358649789,\n \"acc_norm_stderr\": 0.026558372502661916\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.7219730941704036,\n \"acc_stderr\": 0.030069584874494043,\n \"acc_norm\": 0.7219730941704036,\n \"acc_norm_stderr\": 0.030069584874494043\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.7709923664122137,\n \"acc_stderr\": 0.036853466317118506,\n \"acc_norm\": 0.7709923664122137,\n \"acc_norm_stderr\": 0.036853466317118506\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.7768595041322314,\n \"acc_stderr\": 0.03800754475228733,\n \"acc_norm\": 0.7768595041322314,\n \"acc_norm_stderr\": 0.03800754475228733\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7407407407407407,\n \"acc_stderr\": 0.04236511258094632,\n \"acc_norm\": 0.7407407407407407,\n \"acc_norm_stderr\": 0.04236511258094632\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7484662576687117,\n \"acc_stderr\": 0.03408997886857529,\n \"acc_norm\": 0.7484662576687117,\n \"acc_norm_stderr\": 0.03408997886857529\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.41964285714285715,\n \"acc_stderr\": 0.04684099321077106,\n \"acc_norm\": 0.41964285714285715,\n \"acc_norm_stderr\": 0.04684099321077106\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.8058252427184466,\n \"acc_stderr\": 0.039166677628225836,\n \"acc_norm\": 0.8058252427184466,\n \"acc_norm_stderr\": 0.039166677628225836\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8803418803418803,\n \"acc_stderr\": 0.021262719400406964,\n \"acc_norm\": 0.8803418803418803,\n \"acc_norm_stderr\": 0.021262719400406964\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.75,\n \"acc_stderr\": 0.04351941398892446,\n \"acc_norm\": 0.75,\n \"acc_norm_stderr\": 0.04351941398892446\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.8109833971902938,\n \"acc_stderr\": 0.014000791294407006,\n \"acc_norm\": 0.8109833971902938,\n 
\"acc_norm_stderr\": 0.014000791294407006\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.6820809248554913,\n \"acc_stderr\": 0.025070713719153186,\n \"acc_norm\": 0.6820809248554913,\n \"acc_norm_stderr\": 0.025070713719153186\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.37318435754189944,\n \"acc_stderr\": 0.01617569201338196,\n \"acc_norm\": 0.37318435754189944,\n \"acc_norm_stderr\": 0.01617569201338196\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.7026143790849673,\n \"acc_stderr\": 0.02617390850671858,\n \"acc_norm\": 0.7026143790849673,\n \"acc_norm_stderr\": 0.02617390850671858\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.707395498392283,\n \"acc_stderr\": 0.02583989833487798,\n \"acc_norm\": 0.707395498392283,\n \"acc_norm_stderr\": 0.02583989833487798\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.7345679012345679,\n \"acc_stderr\": 0.024569223600460845,\n \"acc_norm\": 0.7345679012345679,\n \"acc_norm_stderr\": 0.024569223600460845\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.5,\n \"acc_stderr\": 0.029827499313594685,\n \"acc_norm\": 0.5,\n \"acc_norm_stderr\": 0.029827499313594685\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.4745762711864407,\n \"acc_stderr\": 0.012753716929101004,\n \"acc_norm\": 0.4745762711864407,\n \"acc_norm_stderr\": 0.012753716929101004\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.6764705882352942,\n \"acc_stderr\": 0.02841820861940676,\n \"acc_norm\": 0.6764705882352942,\n \"acc_norm_stderr\": 0.02841820861940676\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6650326797385621,\n \"acc_stderr\": 0.019094228167000325,\n \"acc_norm\": 0.6650326797385621,\n \"acc_norm_stderr\": 0.019094228167000325\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6818181818181818,\n \"acc_stderr\": 0.044612721759105085,\n \"acc_norm\": 0.6818181818181818,\n \"acc_norm_stderr\": 0.044612721759105085\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.7224489795918367,\n \"acc_stderr\": 0.028666857790274648,\n \"acc_norm\": 0.7224489795918367,\n \"acc_norm_stderr\": 0.028666857790274648\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8606965174129353,\n \"acc_stderr\": 0.02448448716291397,\n \"acc_norm\": 0.8606965174129353,\n \"acc_norm_stderr\": 0.02448448716291397\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.86,\n \"acc_stderr\": 0.0348735088019777,\n \"acc_norm\": 0.86,\n \"acc_norm_stderr\": 0.0348735088019777\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5301204819277109,\n \"acc_stderr\": 0.03885425420866767,\n \"acc_norm\": 0.5301204819277109,\n \"acc_norm_stderr\": 0.03885425420866767\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.7953216374269005,\n \"acc_stderr\": 0.03094445977853321,\n \"acc_norm\": 0.7953216374269005,\n \"acc_norm_stderr\": 0.03094445977853321\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.5275397796817626,\n \"mc1_stderr\": 0.01747693019071219,\n \"mc2\": 0.6576967145029661,\n \"mc2_stderr\": 0.015600354210172791\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.8050513022888713,\n \"acc_stderr\": 0.011134099415938275\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.5299469294920395,\n \"acc_stderr\": 0.013747759685444703\n }\n}\n```", "repo_url": "https://huggingface.co/jan-ai/Pandora-13B-v1", "leaderboard_url": 
"https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_12_16T16_04_27.777875", "path": ["**/details_harness|arc:challenge|25_2023-12-16T16-04-27.777875.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-12-16T16-04-27.777875.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_12_16T16_04_27.777875", "path": ["**/details_harness|gsm8k|5_2023-12-16T16-04-27.777875.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-12-16T16-04-27.777875.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_12_16T16_04_27.777875", "path": ["**/details_harness|hellaswag|10_2023-12-16T16-04-27.777875.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-12-16T16-04-27.777875.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_12_16T16_04_27.777875", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T16-04-27.777875.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-12-16T16-04-27.777875.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-12-16T16-04-27.777875.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T16-04-27.777875.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T16-04-27.777875.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-12-16T16-04-27.777875.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T16-04-27.777875.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T16-04-27.777875.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T16-04-27.777875.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T16-04-27.777875.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-12-16T16-04-27.777875.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-12-16T16-04-27.777875.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T16-04-27.777875.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-12-16T16-04-27.777875.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T16-04-27.777875.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T16-04-27.777875.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T16-04-27.777875.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-12-16T16-04-27.777875.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T16-04-27.777875.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T16-04-27.777875.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T16-04-27.777875.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T16-04-27.777875.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T16-04-27.777875.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T16-04-27.777875.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T16-04-27.777875.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T16-04-27.777875.parquet", 
"**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T16-04-27.777875.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T16-04-27.777875.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T16-04-27.777875.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T16-04-27.777875.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T16-04-27.777875.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T16-04-27.777875.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-12-16T16-04-27.777875.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T16-04-27.777875.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-12-16T16-04-27.777875.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T16-04-27.777875.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T16-04-27.777875.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T16-04-27.777875.parquet", "**/details_harness|hendrycksTest-management|5_2023-12-16T16-04-27.777875.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-12-16T16-04-27.777875.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T16-04-27.777875.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T16-04-27.777875.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T16-04-27.777875.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T16-04-27.777875.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-12-16T16-04-27.777875.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-12-16T16-04-27.777875.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-12-16T16-04-27.777875.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T16-04-27.777875.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-12-16T16-04-27.777875.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T16-04-27.777875.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T16-04-27.777875.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-12-16T16-04-27.777875.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-12-16T16-04-27.777875.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-12-16T16-04-27.777875.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T16-04-27.777875.parquet", "**/details_harness|hendrycksTest-virology|5_2023-12-16T16-04-27.777875.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-12-16T16-04-27.777875.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T16-04-27.777875.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-12-16T16-04-27.777875.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-12-16T16-04-27.777875.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T16-04-27.777875.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T16-04-27.777875.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-12-16T16-04-27.777875.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T16-04-27.777875.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T16-04-27.777875.parquet", 
"**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T16-04-27.777875.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T16-04-27.777875.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-12-16T16-04-27.777875.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-12-16T16-04-27.777875.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T16-04-27.777875.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-12-16T16-04-27.777875.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T16-04-27.777875.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T16-04-27.777875.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T16-04-27.777875.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-12-16T16-04-27.777875.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T16-04-27.777875.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T16-04-27.777875.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T16-04-27.777875.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T16-04-27.777875.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T16-04-27.777875.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T16-04-27.777875.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T16-04-27.777875.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T16-04-27.777875.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T16-04-27.777875.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T16-04-27.777875.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T16-04-27.777875.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T16-04-27.777875.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T16-04-27.777875.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T16-04-27.777875.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-12-16T16-04-27.777875.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T16-04-27.777875.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-12-16T16-04-27.777875.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T16-04-27.777875.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T16-04-27.777875.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T16-04-27.777875.parquet", "**/details_harness|hendrycksTest-management|5_2023-12-16T16-04-27.777875.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-12-16T16-04-27.777875.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T16-04-27.777875.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T16-04-27.777875.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T16-04-27.777875.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T16-04-27.777875.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-12-16T16-04-27.777875.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-12-16T16-04-27.777875.parquet", 
"**/details_harness|hendrycksTest-prehistory|5_2023-12-16T16-04-27.777875.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T16-04-27.777875.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-12-16T16-04-27.777875.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T16-04-27.777875.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T16-04-27.777875.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-12-16T16-04-27.777875.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-12-16T16-04-27.777875.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-12-16T16-04-27.777875.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T16-04-27.777875.parquet", "**/details_harness|hendrycksTest-virology|5_2023-12-16T16-04-27.777875.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-12-16T16-04-27.777875.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_12_16T16_04_27.777875", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T16-04-27.777875.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T16-04-27.777875.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_12_16T16_04_27.777875", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-12-16T16-04-27.777875.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-12-16T16-04-27.777875.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_12_16T16_04_27.777875", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-12-16T16-04-27.777875.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-12-16T16-04-27.777875.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_12_16T16_04_27.777875", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T16-04-27.777875.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T16-04-27.777875.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_12_16T16_04_27.777875", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T16-04-27.777875.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T16-04-27.777875.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_12_16T16_04_27.777875", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-12-16T16-04-27.777875.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-12-16T16-04-27.777875.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_12_16T16_04_27.777875", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T16-04-27.777875.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T16-04-27.777875.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_12_16T16_04_27.777875", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T16-04-27.777875.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T16-04-27.777875.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_12_16T16_04_27.777875", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T16-04-27.777875.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T16-04-27.777875.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_12_16T16_04_27.777875", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T16-04-27.777875.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T16-04-27.777875.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_12_16T16_04_27.777875", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-12-16T16-04-27.777875.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-12-16T16-04-27.777875.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_12_16T16_04_27.777875", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-12-16T16-04-27.777875.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-12-16T16-04-27.777875.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_12_16T16_04_27.777875", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T16-04-27.777875.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T16-04-27.777875.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_12_16T16_04_27.777875", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-12-16T16-04-27.777875.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-12-16T16-04-27.777875.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_12_16T16_04_27.777875", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T16-04-27.777875.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T16-04-27.777875.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_12_16T16_04_27.777875", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T16-04-27.777875.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T16-04-27.777875.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_12_16T16_04_27.777875", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T16-04-27.777875.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T16-04-27.777875.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_12_16T16_04_27.777875", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-12-16T16-04-27.777875.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-12-16T16-04-27.777875.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_12_16T16_04_27.777875", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T16-04-27.777875.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T16-04-27.777875.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_12_16T16_04_27.777875", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T16-04-27.777875.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T16-04-27.777875.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_12_16T16_04_27.777875", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T16-04-27.777875.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T16-04-27.777875.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_12_16T16_04_27.777875", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T16-04-27.777875.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T16-04-27.777875.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_12_16T16_04_27.777875", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T16-04-27.777875.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T16-04-27.777875.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_12_16T16_04_27.777875", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T16-04-27.777875.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T16-04-27.777875.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_12_16T16_04_27.777875", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T16-04-27.777875.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T16-04-27.777875.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_12_16T16_04_27.777875", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T16-04-27.777875.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T16-04-27.777875.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_12_16T16_04_27.777875", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T16-04-27.777875.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T16-04-27.777875.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_12_16T16_04_27.777875", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T16-04-27.777875.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T16-04-27.777875.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_12_16T16_04_27.777875", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T16-04-27.777875.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T16-04-27.777875.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_12_16T16_04_27.777875", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T16-04-27.777875.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T16-04-27.777875.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_12_16T16_04_27.777875", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T16-04-27.777875.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T16-04-27.777875.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_12_16T16_04_27.777875", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T16-04-27.777875.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T16-04-27.777875.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_12_16T16_04_27.777875", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-12-16T16-04-27.777875.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-12-16T16-04-27.777875.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_12_16T16_04_27.777875", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T16-04-27.777875.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T16-04-27.777875.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_12_16T16_04_27.777875", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-12-16T16-04-27.777875.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-12-16T16-04-27.777875.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_12_16T16_04_27.777875", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T16-04-27.777875.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T16-04-27.777875.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_12_16T16_04_27.777875", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T16-04-27.777875.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T16-04-27.777875.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_12_16T16_04_27.777875", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T16-04-27.777875.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T16-04-27.777875.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_12_16T16_04_27.777875", "path": ["**/details_harness|hendrycksTest-management|5_2023-12-16T16-04-27.777875.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-12-16T16-04-27.777875.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_12_16T16_04_27.777875", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-12-16T16-04-27.777875.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-12-16T16-04-27.777875.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_12_16T16_04_27.777875", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T16-04-27.777875.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T16-04-27.777875.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_12_16T16_04_27.777875", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T16-04-27.777875.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T16-04-27.777875.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_12_16T16_04_27.777875", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T16-04-27.777875.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T16-04-27.777875.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_12_16T16_04_27.777875", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T16-04-27.777875.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T16-04-27.777875.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_12_16T16_04_27.777875", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-12-16T16-04-27.777875.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-12-16T16-04-27.777875.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_12_16T16_04_27.777875", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-12-16T16-04-27.777875.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-12-16T16-04-27.777875.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_12_16T16_04_27.777875", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-12-16T16-04-27.777875.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-12-16T16-04-27.777875.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_12_16T16_04_27.777875", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T16-04-27.777875.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T16-04-27.777875.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_12_16T16_04_27.777875", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-12-16T16-04-27.777875.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2023-12-16T16-04-27.777875.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_12_16T16_04_27.777875", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T16-04-27.777875.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T16-04-27.777875.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_12_16T16_04_27.777875", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T16-04-27.777875.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T16-04-27.777875.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_12_16T16_04_27.777875", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-12-16T16-04-27.777875.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-12-16T16-04-27.777875.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_12_16T16_04_27.777875", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-12-16T16-04-27.777875.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-12-16T16-04-27.777875.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_12_16T16_04_27.777875", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-12-16T16-04-27.777875.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-12-16T16-04-27.777875.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_12_16T16_04_27.777875", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T16-04-27.777875.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T16-04-27.777875.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_12_16T16_04_27.777875", "path": ["**/details_harness|hendrycksTest-virology|5_2023-12-16T16-04-27.777875.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-12-16T16-04-27.777875.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_12_16T16_04_27.777875", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-12-16T16-04-27.777875.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-12-16T16-04-27.777875.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_12_16T16_04_27.777875", "path": ["**/details_harness|truthfulqa:mc|0_2023-12-16T16-04-27.777875.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-12-16T16-04-27.777875.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_12_16T16_04_27.777875", "path": ["**/details_harness|winogrande|5_2023-12-16T16-04-27.777875.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-12-16T16-04-27.777875.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_12_16T16_04_27.777875", "path": ["results_2023-12-16T16-04-27.777875.parquet"]}, {"split": "latest", "path": 
["results_2023-12-16T16-04-27.777875.parquet"]}]}]}
2023-12-16T16:08:05+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of jan-ai/Pandora-13B-v1 Dataset automatically created during the evaluation run of model jan-ai/Pandora-13B-v1 on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-12-16T16:04:27.777875 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
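The loading snippet referenced in the card above is not reproduced in this processed copy; a minimal sketch follows, assuming the repository uses the same `details_<org>__<model>` naming convention as the other evaluation-run datasets and the config and split names listed in this record's metadata.

```python
from datasets import load_dataset

# Repository id is an assumption, inferred from the usual
# open-llm-leaderboard naming convention for evaluation details.
data = load_dataset(
    "open-llm-leaderboard/details_jan-ai__Pandora-13B-v1",
    "harness_winogrande_5",  # any config_name from the metadata above works
    split="latest",          # or the timestamped split "2023_12_16T16_04_27.777875"
)
```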
[ "# Dataset Card for Evaluation run of jan-ai/Pandora-13B-v1\n\n\n\nDataset automatically created during the evaluation run of model jan-ai/Pandora-13B-v1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-12-16T16:04:27.777875(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of jan-ai/Pandora-13B-v1\n\n\n\nDataset automatically created during the evaluation run of model jan-ai/Pandora-13B-v1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-12-16T16:04:27.777875(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 183, 67, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of jan-ai/Pandora-13B-v1\n\n\n\nDataset automatically created during the evaluation run of model jan-ai/Pandora-13B-v1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-12-16T16:04:27.777875(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]## Dataset Card Authors [optional]## Dataset Card Contact" ]
1ab57a73c27e52e4f4569eb34ea6e5e34011c442
This is a sample dataset for cookGPT. It has around 6000 Indian food recipes in Guanaco format. Example training record: "### Human: give me recipe for Masala Karela Recipe ### Assistant: To begin making the Masala Karela Recipe,de-seed the karela and slice. Ingredients 6 Karela (Bitter Gourd/ Pavakkai) - deseeded,Salt - to taste,1 Onion - thinly sliced,3 tablespoon Gram flour (besan),2 teaspoo... Cook Time: 30 Cuisine: Indian Diet: Diabetic Friendly"
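A minimal sketch of how a record in this Guanaco-style format could be loaded and split into prompt and response. The dataset id comes from this card; the split name ("train") and the column name ("text") are assumptions, not confirmed by the card.

```python
from datasets import load_dataset

# Split name "train" is assumed.
ds = load_dataset("VishalMysore/cookGPT", split="train")

def split_guanaco(record_text: str):
    # Guanaco-style records mark turns with "### Human:" and "### Assistant:".
    human_tag, assistant_tag = "### Human:", "### Assistant:"
    prompt, _, response = record_text.partition(assistant_tag)
    return prompt.replace(human_tag, "").strip(), response.strip()

example = ds[0]["text"]  # column name "text" is an assumption
prompt, response = split_guanaco(example)
print(prompt[:80], "->", response[:80])
```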
VishalMysore/cookGPT
[ "license:apache-2.0", "region:us" ]
2023-12-16T16:11:23+00:00
{"license": "apache-2.0"}
2023-12-17T16:02:04+00:00
[]
[]
TAGS #license-apache-2.0 #region-us
This is a sample dataset for cookGPT. It has around 6000 Indian food recipes in Guanaco format. Example training record: "### Human: give me recipe for Masala Karela Recipe ### Assistant: To begin making the Masala Karela Recipe,de-seed the karela and slice. Ingredients 6 Karela (Bitter Gourd/ Pavakkai) - deseeded,Salt - to taste,1 Onion - thinly sliced,3 tablespoon Gram flour (besan),2 teaspoo... Cook Time: 30 Cuisine: Indian Diet: Diabetic Friendly"
[ "### Human: give me recipe for Masala Karela Recipe ### Assistant: To begin making the Masala Karela Recipe,de-seed the karela and slice. \nIngredients 6 Karela (Bitter Gourd/ Pavakkai) - deseeded,Salt - to taste,1 Onion - thinly sliced,3 tablespoon Gram flour (besan),2 teaspoo...\nCook Time: 30\nCuisine: Indian\nDiet: Diabetic Friendly\"" ]
[ "TAGS\n#license-apache-2.0 #region-us \n", "### Human: give me recipe for Masala Karela Recipe ### Assistant: To begin making the Masala Karela Recipe,de-seed the karela and slice. \nIngredients 6 Karela (Bitter Gourd/ Pavakkai) - deseeded,Salt - to taste,1 Onion - thinly sliced,3 tablespoon Gram flour (besan),2 teaspoo...\nCook Time: 30\nCuisine: Indian\nDiet: Diabetic Friendly\"" ]
[ 14, 107 ]
[ "passage: TAGS\n#license-apache-2.0 #region-us \n### Human: give me recipe for Masala Karela Recipe ### Assistant: To begin making the Masala Karela Recipe,de-seed the karela and slice. \nIngredients 6 Karela (Bitter Gourd/ Pavakkai) - deseeded,Salt - to taste,1 Onion - thinly sliced,3 tablespoon Gram flour (besan),2 teaspoo...\nCook Time: 30\nCuisine: Indian\nDiet: Diabetic Friendly\"" ]
aaa83b89b6949efae20b6a47f574a691f2fa4c98
# Dataset Card for Evaluation run of kyujinpy/SOLAR-Platypus-10.7B-v2 <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [kyujinpy/SOLAR-Platypus-10.7B-v2](https://huggingface.co/kyujinpy/SOLAR-Platypus-10.7B-v2) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_kyujinpy__SOLAR-Platypus-10.7B-v2", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-12-16T16:14:50.048840](https://huggingface.co/datasets/open-llm-leaderboard/details_kyujinpy__SOLAR-Platypus-10.7B-v2/blob/main/results_2023-12-16T16-14-50.048840.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.5933977113371075, "acc_stderr": 0.033089600641254734, "acc_norm": 0.6032526200271864, "acc_norm_stderr": 0.033912305079181165, "mc1": 0.2876376988984088, "mc1_stderr": 0.0158463151013948, "mc2": 0.4314947895428414, "mc2_stderr": 0.014252289388190327 }, "harness|arc:challenge|25": { "acc": 0.5443686006825939, "acc_stderr": 0.01455374993930686, "acc_norm": 0.5938566552901023, "acc_norm_stderr": 0.014351656690097862 }, "harness|hellaswag|10": { "acc": 0.6358295160326628, "acc_stderr": 0.004802133511654238, "acc_norm": 0.8356901015733917, "acc_norm_stderr": 0.003697992356124479 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.35, "acc_stderr": 0.04793724854411022, "acc_norm": 0.35, "acc_norm_stderr": 0.04793724854411022 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.6222222222222222, "acc_stderr": 0.04188307537595853, "acc_norm": 0.6222222222222222, "acc_norm_stderr": 0.04188307537595853 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.6578947368421053, "acc_stderr": 0.03860731599316092, "acc_norm": 0.6578947368421053, "acc_norm_stderr": 0.03860731599316092 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.62, "acc_stderr": 0.04878317312145632, "acc_norm": 0.62, "acc_norm_stderr": 0.04878317312145632 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.6490566037735849, "acc_stderr": 0.02937364625323469, "acc_norm": 0.6490566037735849, "acc_norm_stderr": 0.02937364625323469 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.7152777777777778, "acc_stderr": 0.03773809990686934, "acc_norm": 0.7152777777777778, "acc_norm_stderr": 0.03773809990686934 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.45, "acc_stderr": 0.05, "acc_norm": 0.45, "acc_norm_stderr": 0.05 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.53, "acc_stderr": 0.05016135580465919, "acc_norm": 0.53, "acc_norm_stderr": 0.05016135580465919 }, "harness|hendrycksTest-college_mathematics|5": { 
"acc": 0.39, "acc_stderr": 0.04902071300001975, "acc_norm": 0.39, "acc_norm_stderr": 0.04902071300001975 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.6242774566473989, "acc_stderr": 0.036928207672648664, "acc_norm": 0.6242774566473989, "acc_norm_stderr": 0.036928207672648664 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.4019607843137255, "acc_stderr": 0.04878608714466996, "acc_norm": 0.4019607843137255, "acc_norm_stderr": 0.04878608714466996 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.73, "acc_stderr": 0.0446196043338474, "acc_norm": 0.73, "acc_norm_stderr": 0.0446196043338474 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.4553191489361702, "acc_stderr": 0.032555253593403555, "acc_norm": 0.4553191489361702, "acc_norm_stderr": 0.032555253593403555 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.42105263157894735, "acc_stderr": 0.046446020912223177, "acc_norm": 0.42105263157894735, "acc_norm_stderr": 0.046446020912223177 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.42758620689655175, "acc_stderr": 0.04122737111370331, "acc_norm": 0.42758620689655175, "acc_norm_stderr": 0.04122737111370331 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.3888888888888889, "acc_stderr": 0.02510742548113729, "acc_norm": 0.3888888888888889, "acc_norm_stderr": 0.02510742548113729 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.38095238095238093, "acc_stderr": 0.04343525428949098, "acc_norm": 0.38095238095238093, "acc_norm_stderr": 0.04343525428949098 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.32, "acc_stderr": 0.046882617226215034, "acc_norm": 0.32, "acc_norm_stderr": 0.046882617226215034 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.7354838709677419, "acc_stderr": 0.02509189237885928, "acc_norm": 0.7354838709677419, "acc_norm_stderr": 0.02509189237885928 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.4482758620689655, "acc_stderr": 0.03499113137676744, "acc_norm": 0.4482758620689655, "acc_norm_stderr": 0.03499113137676744 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.69, "acc_stderr": 0.04648231987117316, "acc_norm": 0.69, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.7878787878787878, "acc_stderr": 0.03192271569548301, "acc_norm": 0.7878787878787878, "acc_norm_stderr": 0.03192271569548301 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.7878787878787878, "acc_stderr": 0.029126522834586794, "acc_norm": 0.7878787878787878, "acc_norm_stderr": 0.029126522834586794 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.8601036269430051, "acc_stderr": 0.025033870583015178, "acc_norm": 0.8601036269430051, "acc_norm_stderr": 0.025033870583015178 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.5641025641025641, "acc_stderr": 0.025141801511177498, "acc_norm": 0.5641025641025641, "acc_norm_stderr": 0.025141801511177498 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.3148148148148148, "acc_stderr": 0.028317533496066482, "acc_norm": 0.3148148148148148, "acc_norm_stderr": 0.028317533496066482 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.5882352941176471, "acc_stderr": 0.03196876989195778, "acc_norm": 0.5882352941176471, "acc_norm_stderr": 0.03196876989195778 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.31788079470198677, "acc_stderr": 0.038020397601079024, "acc_norm": 0.31788079470198677, 
"acc_norm_stderr": 0.038020397601079024 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.7908256880733945, "acc_stderr": 0.017437937173343233, "acc_norm": 0.7908256880733945, "acc_norm_stderr": 0.017437937173343233 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.49074074074074076, "acc_stderr": 0.034093869469927006, "acc_norm": 0.49074074074074076, "acc_norm_stderr": 0.034093869469927006 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.7892156862745098, "acc_stderr": 0.028626547912437416, "acc_norm": 0.7892156862745098, "acc_norm_stderr": 0.028626547912437416 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.8059071729957806, "acc_stderr": 0.02574490253229092, "acc_norm": 0.8059071729957806, "acc_norm_stderr": 0.02574490253229092 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.6771300448430493, "acc_stderr": 0.03138147637575499, "acc_norm": 0.6771300448430493, "acc_norm_stderr": 0.03138147637575499 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.6030534351145038, "acc_stderr": 0.04291135671009224, "acc_norm": 0.6030534351145038, "acc_norm_stderr": 0.04291135671009224 }, "harness|hendrycksTest-international_law|5": { "acc": 0.7520661157024794, "acc_stderr": 0.03941897526516301, "acc_norm": 0.7520661157024794, "acc_norm_stderr": 0.03941897526516301 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.6851851851851852, "acc_stderr": 0.04489931073591312, "acc_norm": 0.6851851851851852, "acc_norm_stderr": 0.04489931073591312 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.7239263803680982, "acc_stderr": 0.035123852837050475, "acc_norm": 0.7239263803680982, "acc_norm_stderr": 0.035123852837050475 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.48214285714285715, "acc_stderr": 0.047427623612430116, "acc_norm": 0.48214285714285715, "acc_norm_stderr": 0.047427623612430116 }, "harness|hendrycksTest-management|5": { "acc": 0.7572815533980582, "acc_stderr": 0.04245022486384495, "acc_norm": 0.7572815533980582, "acc_norm_stderr": 0.04245022486384495 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8547008547008547, "acc_stderr": 0.023086635086841407, "acc_norm": 0.8547008547008547, "acc_norm_stderr": 0.023086635086841407 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.68, "acc_stderr": 0.04688261722621505, "acc_norm": 0.68, "acc_norm_stderr": 0.04688261722621505 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.7982120051085568, "acc_stderr": 0.014351702181636864, "acc_norm": 0.7982120051085568, "acc_norm_stderr": 0.014351702181636864 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.6184971098265896, "acc_stderr": 0.02615219861972679, "acc_norm": 0.6184971098265896, "acc_norm_stderr": 0.02615219861972679 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.3653631284916201, "acc_stderr": 0.016104833880142295, "acc_norm": 0.3653631284916201, "acc_norm_stderr": 0.016104833880142295 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.6111111111111112, "acc_stderr": 0.027914055510468008, "acc_norm": 0.6111111111111112, "acc_norm_stderr": 0.027914055510468008 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.6591639871382636, "acc_stderr": 0.026920841260776165, "acc_norm": 0.6591639871382636, "acc_norm_stderr": 0.026920841260776165 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.6790123456790124, "acc_stderr": 0.025976566010862748, "acc_norm": 0.6790123456790124, "acc_norm_stderr": 0.025976566010862748 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 
0.4645390070921986, "acc_stderr": 0.02975238965742705, "acc_norm": 0.4645390070921986, "acc_norm_stderr": 0.02975238965742705 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.43415906127770537, "acc_stderr": 0.01265903323706725, "acc_norm": 0.43415906127770537, "acc_norm_stderr": 0.01265903323706725 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.5955882352941176, "acc_stderr": 0.02981263070156974, "acc_norm": 0.5955882352941176, "acc_norm_stderr": 0.02981263070156974 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.619281045751634, "acc_stderr": 0.019643801557924806, "acc_norm": 0.619281045751634, "acc_norm_stderr": 0.019643801557924806 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6636363636363637, "acc_stderr": 0.04525393596302506, "acc_norm": 0.6636363636363637, "acc_norm_stderr": 0.04525393596302506 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.5551020408163265, "acc_stderr": 0.031814251181977865, "acc_norm": 0.5551020408163265, "acc_norm_stderr": 0.031814251181977865 }, "harness|hendrycksTest-sociology|5": { "acc": 0.746268656716418, "acc_stderr": 0.030769444967296018, "acc_norm": 0.746268656716418, "acc_norm_stderr": 0.030769444967296018 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.79, "acc_stderr": 0.040936018074033256, "acc_norm": 0.79, "acc_norm_stderr": 0.040936018074033256 }, "harness|hendrycksTest-virology|5": { "acc": 0.4457831325301205, "acc_stderr": 0.03869543323472101, "acc_norm": 0.4457831325301205, "acc_norm_stderr": 0.03869543323472101 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.7953216374269005, "acc_stderr": 0.030944459778533207, "acc_norm": 0.7953216374269005, "acc_norm_stderr": 0.030944459778533207 }, "harness|truthfulqa:mc|0": { "mc1": 0.2876376988984088, "mc1_stderr": 0.0158463151013948, "mc2": 0.4314947895428414, "mc2_stderr": 0.014252289388190327 }, "harness|winogrande|5": { "acc": 0.8145224940805051, "acc_stderr": 0.010923965303140503 }, "harness|gsm8k|5": { "acc": 0.0401819560272934, "acc_stderr": 0.005409439736970511 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). 
--> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
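As a complement to the per-task loading snippet in the card above, a minimal sketch of pulling the aggregated "results" configuration for this run; the repository id, config name, and "latest" split are taken from the card and its metadata.

```python
from datasets import load_dataset

# Aggregated metrics for the run (the "results" configuration described above).
results = load_dataset(
    "open-llm-leaderboard/details_kyujinpy__SOLAR-Platypus-10.7B-v2",
    "results",
    split="latest",
)
print(results[0])
```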
open-llm-leaderboard/details_kyujinpy__SOLAR-Platypus-10.7B-v2
[ "region:us" ]
2023-12-16T16:17:46+00:00
{"pretty_name": "Evaluation run of kyujinpy/SOLAR-Platypus-10.7B-v2", "dataset_summary": "Dataset automatically created during the evaluation run of model [kyujinpy/SOLAR-Platypus-10.7B-v2](https://huggingface.co/kyujinpy/SOLAR-Platypus-10.7B-v2) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_kyujinpy__SOLAR-Platypus-10.7B-v2\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-12-16T16:14:50.048840](https://huggingface.co/datasets/open-llm-leaderboard/details_kyujinpy__SOLAR-Platypus-10.7B-v2/blob/main/results_2023-12-16T16-14-50.048840.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.5933977113371075,\n \"acc_stderr\": 0.033089600641254734,\n \"acc_norm\": 0.6032526200271864,\n \"acc_norm_stderr\": 0.033912305079181165,\n \"mc1\": 0.2876376988984088,\n \"mc1_stderr\": 0.0158463151013948,\n \"mc2\": 0.4314947895428414,\n \"mc2_stderr\": 0.014252289388190327\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.5443686006825939,\n \"acc_stderr\": 0.01455374993930686,\n \"acc_norm\": 0.5938566552901023,\n \"acc_norm_stderr\": 0.014351656690097862\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6358295160326628,\n \"acc_stderr\": 0.004802133511654238,\n \"acc_norm\": 0.8356901015733917,\n \"acc_norm_stderr\": 0.003697992356124479\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.35,\n \"acc_stderr\": 0.04793724854411022,\n \"acc_norm\": 0.35,\n \"acc_norm_stderr\": 0.04793724854411022\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6222222222222222,\n \"acc_stderr\": 0.04188307537595853,\n \"acc_norm\": 0.6222222222222222,\n \"acc_norm_stderr\": 0.04188307537595853\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.6578947368421053,\n \"acc_stderr\": 0.03860731599316092,\n \"acc_norm\": 0.6578947368421053,\n \"acc_norm_stderr\": 0.03860731599316092\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.62,\n \"acc_stderr\": 0.04878317312145632,\n \"acc_norm\": 0.62,\n \"acc_norm_stderr\": 0.04878317312145632\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.6490566037735849,\n \"acc_stderr\": 0.02937364625323469,\n \"acc_norm\": 0.6490566037735849,\n \"acc_norm_stderr\": 0.02937364625323469\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7152777777777778,\n \"acc_stderr\": 0.03773809990686934,\n \"acc_norm\": 0.7152777777777778,\n \"acc_norm_stderr\": 0.03773809990686934\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.45,\n \"acc_stderr\": 0.05,\n 
\"acc_norm\": 0.45,\n \"acc_norm_stderr\": 0.05\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.53,\n \"acc_stderr\": 0.05016135580465919,\n \"acc_norm\": 0.53,\n \"acc_norm_stderr\": 0.05016135580465919\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.39,\n \"acc_stderr\": 0.04902071300001975,\n \"acc_norm\": 0.39,\n \"acc_norm_stderr\": 0.04902071300001975\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6242774566473989,\n \"acc_stderr\": 0.036928207672648664,\n \"acc_norm\": 0.6242774566473989,\n \"acc_norm_stderr\": 0.036928207672648664\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.4019607843137255,\n \"acc_stderr\": 0.04878608714466996,\n \"acc_norm\": 0.4019607843137255,\n \"acc_norm_stderr\": 0.04878608714466996\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.73,\n \"acc_stderr\": 0.0446196043338474,\n \"acc_norm\": 0.73,\n \"acc_norm_stderr\": 0.0446196043338474\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.4553191489361702,\n \"acc_stderr\": 0.032555253593403555,\n \"acc_norm\": 0.4553191489361702,\n \"acc_norm_stderr\": 0.032555253593403555\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.42105263157894735,\n \"acc_stderr\": 0.046446020912223177,\n \"acc_norm\": 0.42105263157894735,\n \"acc_norm_stderr\": 0.046446020912223177\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.42758620689655175,\n \"acc_stderr\": 0.04122737111370331,\n \"acc_norm\": 0.42758620689655175,\n \"acc_norm_stderr\": 0.04122737111370331\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.3888888888888889,\n \"acc_stderr\": 0.02510742548113729,\n \"acc_norm\": 0.3888888888888889,\n \"acc_norm_stderr\": 0.02510742548113729\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.38095238095238093,\n \"acc_stderr\": 0.04343525428949098,\n \"acc_norm\": 0.38095238095238093,\n \"acc_norm_stderr\": 0.04343525428949098\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.32,\n \"acc_stderr\": 0.046882617226215034,\n \"acc_norm\": 0.32,\n \"acc_norm_stderr\": 0.046882617226215034\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7354838709677419,\n \"acc_stderr\": 0.02509189237885928,\n \"acc_norm\": 0.7354838709677419,\n \"acc_norm_stderr\": 0.02509189237885928\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.4482758620689655,\n \"acc_stderr\": 0.03499113137676744,\n \"acc_norm\": 0.4482758620689655,\n \"acc_norm_stderr\": 0.03499113137676744\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.69,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.69,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7878787878787878,\n \"acc_stderr\": 0.03192271569548301,\n \"acc_norm\": 0.7878787878787878,\n \"acc_norm_stderr\": 0.03192271569548301\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.7878787878787878,\n \"acc_stderr\": 0.029126522834586794,\n \"acc_norm\": 0.7878787878787878,\n \"acc_norm_stderr\": 0.029126522834586794\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.8601036269430051,\n \"acc_stderr\": 0.025033870583015178,\n \"acc_norm\": 0.8601036269430051,\n \"acc_norm_stderr\": 0.025033870583015178\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 
0.5641025641025641,\n \"acc_stderr\": 0.025141801511177498,\n \"acc_norm\": 0.5641025641025641,\n \"acc_norm_stderr\": 0.025141801511177498\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.3148148148148148,\n \"acc_stderr\": 0.028317533496066482,\n \"acc_norm\": 0.3148148148148148,\n \"acc_norm_stderr\": 0.028317533496066482\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.5882352941176471,\n \"acc_stderr\": 0.03196876989195778,\n \"acc_norm\": 0.5882352941176471,\n \"acc_norm_stderr\": 0.03196876989195778\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.31788079470198677,\n \"acc_stderr\": 0.038020397601079024,\n \"acc_norm\": 0.31788079470198677,\n \"acc_norm_stderr\": 0.038020397601079024\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.7908256880733945,\n \"acc_stderr\": 0.017437937173343233,\n \"acc_norm\": 0.7908256880733945,\n \"acc_norm_stderr\": 0.017437937173343233\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.49074074074074076,\n \"acc_stderr\": 0.034093869469927006,\n \"acc_norm\": 0.49074074074074076,\n \"acc_norm_stderr\": 0.034093869469927006\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.7892156862745098,\n \"acc_stderr\": 0.028626547912437416,\n \"acc_norm\": 0.7892156862745098,\n \"acc_norm_stderr\": 0.028626547912437416\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.8059071729957806,\n \"acc_stderr\": 0.02574490253229092,\n \"acc_norm\": 0.8059071729957806,\n \"acc_norm_stderr\": 0.02574490253229092\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6771300448430493,\n \"acc_stderr\": 0.03138147637575499,\n \"acc_norm\": 0.6771300448430493,\n \"acc_norm_stderr\": 0.03138147637575499\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.6030534351145038,\n \"acc_stderr\": 0.04291135671009224,\n \"acc_norm\": 0.6030534351145038,\n \"acc_norm_stderr\": 0.04291135671009224\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.7520661157024794,\n \"acc_stderr\": 0.03941897526516301,\n \"acc_norm\": 0.7520661157024794,\n \"acc_norm_stderr\": 0.03941897526516301\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.6851851851851852,\n \"acc_stderr\": 0.04489931073591312,\n \"acc_norm\": 0.6851851851851852,\n \"acc_norm_stderr\": 0.04489931073591312\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7239263803680982,\n \"acc_stderr\": 0.035123852837050475,\n \"acc_norm\": 0.7239263803680982,\n \"acc_norm_stderr\": 0.035123852837050475\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.48214285714285715,\n \"acc_stderr\": 0.047427623612430116,\n \"acc_norm\": 0.48214285714285715,\n \"acc_norm_stderr\": 0.047427623612430116\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7572815533980582,\n \"acc_stderr\": 0.04245022486384495,\n \"acc_norm\": 0.7572815533980582,\n \"acc_norm_stderr\": 0.04245022486384495\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8547008547008547,\n \"acc_stderr\": 0.023086635086841407,\n \"acc_norm\": 0.8547008547008547,\n \"acc_norm_stderr\": 0.023086635086841407\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.68,\n \"acc_stderr\": 0.04688261722621505,\n \"acc_norm\": 0.68,\n \"acc_norm_stderr\": 0.04688261722621505\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.7982120051085568,\n \"acc_stderr\": 0.014351702181636864,\n 
\"acc_norm\": 0.7982120051085568,\n \"acc_norm_stderr\": 0.014351702181636864\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.6184971098265896,\n \"acc_stderr\": 0.02615219861972679,\n \"acc_norm\": 0.6184971098265896,\n \"acc_norm_stderr\": 0.02615219861972679\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.3653631284916201,\n \"acc_stderr\": 0.016104833880142295,\n \"acc_norm\": 0.3653631284916201,\n \"acc_norm_stderr\": 0.016104833880142295\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.6111111111111112,\n \"acc_stderr\": 0.027914055510468008,\n \"acc_norm\": 0.6111111111111112,\n \"acc_norm_stderr\": 0.027914055510468008\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.6591639871382636,\n \"acc_stderr\": 0.026920841260776165,\n \"acc_norm\": 0.6591639871382636,\n \"acc_norm_stderr\": 0.026920841260776165\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.6790123456790124,\n \"acc_stderr\": 0.025976566010862748,\n \"acc_norm\": 0.6790123456790124,\n \"acc_norm_stderr\": 0.025976566010862748\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.4645390070921986,\n \"acc_stderr\": 0.02975238965742705,\n \"acc_norm\": 0.4645390070921986,\n \"acc_norm_stderr\": 0.02975238965742705\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.43415906127770537,\n \"acc_stderr\": 0.01265903323706725,\n \"acc_norm\": 0.43415906127770537,\n \"acc_norm_stderr\": 0.01265903323706725\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.5955882352941176,\n \"acc_stderr\": 0.02981263070156974,\n \"acc_norm\": 0.5955882352941176,\n \"acc_norm_stderr\": 0.02981263070156974\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.619281045751634,\n \"acc_stderr\": 0.019643801557924806,\n \"acc_norm\": 0.619281045751634,\n \"acc_norm_stderr\": 0.019643801557924806\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6636363636363637,\n \"acc_stderr\": 0.04525393596302506,\n \"acc_norm\": 0.6636363636363637,\n \"acc_norm_stderr\": 0.04525393596302506\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.5551020408163265,\n \"acc_stderr\": 0.031814251181977865,\n \"acc_norm\": 0.5551020408163265,\n \"acc_norm_stderr\": 0.031814251181977865\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.746268656716418,\n \"acc_stderr\": 0.030769444967296018,\n \"acc_norm\": 0.746268656716418,\n \"acc_norm_stderr\": 0.030769444967296018\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.79,\n \"acc_stderr\": 0.040936018074033256,\n \"acc_norm\": 0.79,\n \"acc_norm_stderr\": 0.040936018074033256\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.4457831325301205,\n \"acc_stderr\": 0.03869543323472101,\n \"acc_norm\": 0.4457831325301205,\n \"acc_norm_stderr\": 0.03869543323472101\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.7953216374269005,\n \"acc_stderr\": 0.030944459778533207,\n \"acc_norm\": 0.7953216374269005,\n \"acc_norm_stderr\": 0.030944459778533207\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.2876376988984088,\n \"mc1_stderr\": 0.0158463151013948,\n \"mc2\": 0.4314947895428414,\n \"mc2_stderr\": 0.014252289388190327\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.8145224940805051,\n \"acc_stderr\": 0.010923965303140503\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.0401819560272934,\n \"acc_stderr\": 0.005409439736970511\n }\n}\n```", "repo_url": 
"https://huggingface.co/kyujinpy/SOLAR-Platypus-10.7B-v2", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_12_16T16_14_50.048840", "path": ["**/details_harness|arc:challenge|25_2023-12-16T16-14-50.048840.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-12-16T16-14-50.048840.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_12_16T16_14_50.048840", "path": ["**/details_harness|gsm8k|5_2023-12-16T16-14-50.048840.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-12-16T16-14-50.048840.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_12_16T16_14_50.048840", "path": ["**/details_harness|hellaswag|10_2023-12-16T16-14-50.048840.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-12-16T16-14-50.048840.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_12_16T16_14_50.048840", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T16-14-50.048840.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-12-16T16-14-50.048840.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-12-16T16-14-50.048840.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T16-14-50.048840.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T16-14-50.048840.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-12-16T16-14-50.048840.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T16-14-50.048840.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T16-14-50.048840.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T16-14-50.048840.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T16-14-50.048840.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-12-16T16-14-50.048840.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-12-16T16-14-50.048840.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T16-14-50.048840.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-12-16T16-14-50.048840.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T16-14-50.048840.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T16-14-50.048840.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T16-14-50.048840.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-12-16T16-14-50.048840.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T16-14-50.048840.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T16-14-50.048840.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T16-14-50.048840.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T16-14-50.048840.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T16-14-50.048840.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T16-14-50.048840.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T16-14-50.048840.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T16-14-50.048840.parquet", 
"**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T16-14-50.048840.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T16-14-50.048840.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T16-14-50.048840.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T16-14-50.048840.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T16-14-50.048840.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T16-14-50.048840.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-12-16T16-14-50.048840.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T16-14-50.048840.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-12-16T16-14-50.048840.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T16-14-50.048840.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T16-14-50.048840.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T16-14-50.048840.parquet", "**/details_harness|hendrycksTest-management|5_2023-12-16T16-14-50.048840.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-12-16T16-14-50.048840.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T16-14-50.048840.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T16-14-50.048840.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T16-14-50.048840.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T16-14-50.048840.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-12-16T16-14-50.048840.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-12-16T16-14-50.048840.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-12-16T16-14-50.048840.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T16-14-50.048840.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-12-16T16-14-50.048840.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T16-14-50.048840.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T16-14-50.048840.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-12-16T16-14-50.048840.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-12-16T16-14-50.048840.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-12-16T16-14-50.048840.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T16-14-50.048840.parquet", "**/details_harness|hendrycksTest-virology|5_2023-12-16T16-14-50.048840.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-12-16T16-14-50.048840.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T16-14-50.048840.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-12-16T16-14-50.048840.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-12-16T16-14-50.048840.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T16-14-50.048840.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T16-14-50.048840.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-12-16T16-14-50.048840.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T16-14-50.048840.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T16-14-50.048840.parquet", 
"**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T16-14-50.048840.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T16-14-50.048840.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-12-16T16-14-50.048840.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-12-16T16-14-50.048840.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T16-14-50.048840.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-12-16T16-14-50.048840.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T16-14-50.048840.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T16-14-50.048840.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T16-14-50.048840.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-12-16T16-14-50.048840.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T16-14-50.048840.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T16-14-50.048840.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T16-14-50.048840.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T16-14-50.048840.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T16-14-50.048840.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T16-14-50.048840.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T16-14-50.048840.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T16-14-50.048840.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T16-14-50.048840.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T16-14-50.048840.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T16-14-50.048840.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T16-14-50.048840.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T16-14-50.048840.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T16-14-50.048840.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-12-16T16-14-50.048840.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T16-14-50.048840.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-12-16T16-14-50.048840.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T16-14-50.048840.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T16-14-50.048840.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T16-14-50.048840.parquet", "**/details_harness|hendrycksTest-management|5_2023-12-16T16-14-50.048840.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-12-16T16-14-50.048840.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T16-14-50.048840.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T16-14-50.048840.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T16-14-50.048840.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T16-14-50.048840.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-12-16T16-14-50.048840.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-12-16T16-14-50.048840.parquet", 
"**/details_harness|hendrycksTest-prehistory|5_2023-12-16T16-14-50.048840.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T16-14-50.048840.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-12-16T16-14-50.048840.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T16-14-50.048840.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T16-14-50.048840.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-12-16T16-14-50.048840.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-12-16T16-14-50.048840.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-12-16T16-14-50.048840.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T16-14-50.048840.parquet", "**/details_harness|hendrycksTest-virology|5_2023-12-16T16-14-50.048840.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-12-16T16-14-50.048840.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_12_16T16_14_50.048840", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T16-14-50.048840.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T16-14-50.048840.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_12_16T16_14_50.048840", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-12-16T16-14-50.048840.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-12-16T16-14-50.048840.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_12_16T16_14_50.048840", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-12-16T16-14-50.048840.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-12-16T16-14-50.048840.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_12_16T16_14_50.048840", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T16-14-50.048840.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T16-14-50.048840.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_12_16T16_14_50.048840", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T16-14-50.048840.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T16-14-50.048840.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_12_16T16_14_50.048840", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-12-16T16-14-50.048840.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-12-16T16-14-50.048840.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_12_16T16_14_50.048840", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T16-14-50.048840.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T16-14-50.048840.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_12_16T16_14_50.048840", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T16-14-50.048840.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T16-14-50.048840.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_12_16T16_14_50.048840", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T16-14-50.048840.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T16-14-50.048840.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_12_16T16_14_50.048840", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T16-14-50.048840.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T16-14-50.048840.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_12_16T16_14_50.048840", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-12-16T16-14-50.048840.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-12-16T16-14-50.048840.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_12_16T16_14_50.048840", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-12-16T16-14-50.048840.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-12-16T16-14-50.048840.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_12_16T16_14_50.048840", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T16-14-50.048840.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T16-14-50.048840.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_12_16T16_14_50.048840", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-12-16T16-14-50.048840.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-12-16T16-14-50.048840.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_12_16T16_14_50.048840", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T16-14-50.048840.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T16-14-50.048840.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_12_16T16_14_50.048840", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T16-14-50.048840.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T16-14-50.048840.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_12_16T16_14_50.048840", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T16-14-50.048840.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T16-14-50.048840.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_12_16T16_14_50.048840", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-12-16T16-14-50.048840.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-12-16T16-14-50.048840.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_12_16T16_14_50.048840", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T16-14-50.048840.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T16-14-50.048840.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_12_16T16_14_50.048840", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T16-14-50.048840.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T16-14-50.048840.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_12_16T16_14_50.048840", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T16-14-50.048840.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T16-14-50.048840.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_12_16T16_14_50.048840", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T16-14-50.048840.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T16-14-50.048840.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_12_16T16_14_50.048840", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T16-14-50.048840.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T16-14-50.048840.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_12_16T16_14_50.048840", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T16-14-50.048840.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T16-14-50.048840.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_12_16T16_14_50.048840", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T16-14-50.048840.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T16-14-50.048840.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_12_16T16_14_50.048840", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T16-14-50.048840.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T16-14-50.048840.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_12_16T16_14_50.048840", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T16-14-50.048840.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T16-14-50.048840.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_12_16T16_14_50.048840", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T16-14-50.048840.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T16-14-50.048840.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_12_16T16_14_50.048840", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T16-14-50.048840.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T16-14-50.048840.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_12_16T16_14_50.048840", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T16-14-50.048840.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T16-14-50.048840.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_12_16T16_14_50.048840", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T16-14-50.048840.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T16-14-50.048840.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_12_16T16_14_50.048840", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T16-14-50.048840.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T16-14-50.048840.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_12_16T16_14_50.048840", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-12-16T16-14-50.048840.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-12-16T16-14-50.048840.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_12_16T16_14_50.048840", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T16-14-50.048840.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T16-14-50.048840.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_12_16T16_14_50.048840", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-12-16T16-14-50.048840.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-12-16T16-14-50.048840.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_12_16T16_14_50.048840", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T16-14-50.048840.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T16-14-50.048840.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_12_16T16_14_50.048840", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T16-14-50.048840.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T16-14-50.048840.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_12_16T16_14_50.048840", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T16-14-50.048840.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T16-14-50.048840.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_12_16T16_14_50.048840", "path": ["**/details_harness|hendrycksTest-management|5_2023-12-16T16-14-50.048840.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-12-16T16-14-50.048840.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_12_16T16_14_50.048840", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-12-16T16-14-50.048840.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-12-16T16-14-50.048840.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_12_16T16_14_50.048840", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T16-14-50.048840.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T16-14-50.048840.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_12_16T16_14_50.048840", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T16-14-50.048840.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T16-14-50.048840.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_12_16T16_14_50.048840", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T16-14-50.048840.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T16-14-50.048840.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_12_16T16_14_50.048840", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T16-14-50.048840.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T16-14-50.048840.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_12_16T16_14_50.048840", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-12-16T16-14-50.048840.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-12-16T16-14-50.048840.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_12_16T16_14_50.048840", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-12-16T16-14-50.048840.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-12-16T16-14-50.048840.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_12_16T16_14_50.048840", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-12-16T16-14-50.048840.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-12-16T16-14-50.048840.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_12_16T16_14_50.048840", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T16-14-50.048840.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T16-14-50.048840.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_12_16T16_14_50.048840", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-12-16T16-14-50.048840.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2023-12-16T16-14-50.048840.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_12_16T16_14_50.048840", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T16-14-50.048840.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T16-14-50.048840.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_12_16T16_14_50.048840", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T16-14-50.048840.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T16-14-50.048840.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_12_16T16_14_50.048840", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-12-16T16-14-50.048840.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-12-16T16-14-50.048840.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_12_16T16_14_50.048840", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-12-16T16-14-50.048840.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-12-16T16-14-50.048840.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_12_16T16_14_50.048840", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-12-16T16-14-50.048840.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-12-16T16-14-50.048840.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_12_16T16_14_50.048840", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T16-14-50.048840.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T16-14-50.048840.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_12_16T16_14_50.048840", "path": ["**/details_harness|hendrycksTest-virology|5_2023-12-16T16-14-50.048840.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-12-16T16-14-50.048840.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_12_16T16_14_50.048840", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-12-16T16-14-50.048840.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-12-16T16-14-50.048840.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_12_16T16_14_50.048840", "path": ["**/details_harness|truthfulqa:mc|0_2023-12-16T16-14-50.048840.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-12-16T16-14-50.048840.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_12_16T16_14_50.048840", "path": ["**/details_harness|winogrande|5_2023-12-16T16-14-50.048840.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-12-16T16-14-50.048840.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_12_16T16_14_50.048840", "path": ["results_2023-12-16T16-14-50.048840.parquet"]}, {"split": "latest", "path": 
["results_2023-12-16T16-14-50.048840.parquet"]}]}]}
2023-12-16T16:18:27+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of kyujinpy/SOLAR-Platypus-10.7B-v2 Dataset automatically created during the evaluation run of model kyujinpy/SOLAR-Platypus-10.7B-v2 on the Open LLM Leaderboard. The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-12-16T16:14:50.048840(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
[ "# Dataset Card for Evaluation run of kyujinpy/SOLAR-Platypus-10.7B-v2\n\n\n\nDataset automatically created during the evaluation run of model kyujinpy/SOLAR-Platypus-10.7B-v2 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-12-16T16:14:50.048840(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of kyujinpy/SOLAR-Platypus-10.7B-v2\n\n\n\nDataset automatically created during the evaluation run of model kyujinpy/SOLAR-Platypus-10.7B-v2 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-12-16T16:14:50.048840(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 197, 67, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of kyujinpy/SOLAR-Platypus-10.7B-v2\n\n\n\nDataset automatically created during the evaluation run of model kyujinpy/SOLAR-Platypus-10.7B-v2 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-12-16T16:14:50.048840(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]" ]
2d26ed06c8168672c2ee2493043caff1a23f4d60
# 🇺🇦 Open Source Ukrainian Text-to-Speech dataset named OLEKSA Join the Ukrainian community - https://t.me/speech_synthesis_uk More details about this dataset - https://github.com/egorsmkv/ukrainian-tts-datasets/tree/main/oleksa # Voice OLEKSA (male) The voice of: https://twitter.com/CHOBUDA ## Features - Quality: high - Duration: 6h - Audio formats: OPUS - Text format: JSONL (a `metadata.jsonl` file) - Frequency: 48000 Hz
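A minimal loading sketch (not part of the original card; it assumes the dataset is hosted under the repo id `Yehor/ukrainian-tts-oleksa` shown below and follows the usual Hugging Face audio-dataset layout, with an `audio` column and the text fields from `metadata.jsonl`):

```python
from datasets import load_dataset, Audio

# Assumption: this repo id matches the card above and exposes an "audio" column
# plus the transcription fields stored in metadata.jsonl.
ds = load_dataset("Yehor/ukrainian-tts-oleksa", split="train")

# Decode the OPUS audio at the 48000 Hz rate stated in the card.
ds = ds.cast_column("audio", Audio(sampling_rate=48_000))

sample = ds[0]
print(sample.keys(), sample["audio"]["sampling_rate"])
```

If the repository is instead laid out as raw OPUS files next to `metadata.jsonl`, the same idea works with the generic `audiofolder` loader pointed at a local copy.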
Yehor/ukrainian-tts-oleksa
[ "task_categories:text-to-speech", "language:uk", "license:apache-2.0", "region:us" ]
2023-12-16T16:21:05+00:00
{"language": ["uk"], "license": "apache-2.0", "task_categories": ["text-to-speech"]}
2023-12-16T16:24:26+00:00
[]
[ "uk" ]
TAGS #task_categories-text-to-speech #language-Ukrainian #license-apache-2.0 #region-us
# 🇺🇦 Open Source Ukrainian Text-to-Speech dataset named OLEKSA Join Ukrainian community - https://t.me/speech_synthesis_uk More details about this dataset - URL # Voice OLEKSA (male) The voice of: URL ## Features - Quality: high - Duration: 6h - Audio formats: OPUS - Text format: JSONL (a 'URL' file) - Frequency: 48000 Hz
[ "# 🇺🇦 Open Source Ukrainian Text-to-Speech dataset named OLEKSA\n\nJoin Ukrainian community - https://t.me/speech_synthesis_uk\n\nMore details about this dataset - URL", "# Voice OLEKSA (male)\n\nThe voice of: URL", "## Features\n\n- Quality: high\n- Duration: 6h\n- Audio formats: OPUS\n- Text format: JSONL (a 'URL' file)\n- Frequency: 48000 Hz" ]
[ "TAGS\n#task_categories-text-to-speech #language-Ukrainian #license-apache-2.0 #region-us \n", "# 🇺🇦 Open Source Ukrainian Text-to-Speech dataset named OLEKSA\n\nJoin Ukrainian community - https://t.me/speech_synthesis_uk\n\nMore details about this dataset - URL", "# Voice OLEKSA (male)\n\nThe voice of: URL", "## Features\n\n- Quality: high\n- Duration: 6h\n- Audio formats: OPUS\n- Text format: JSONL (a 'URL' file)\n- Frequency: 48000 Hz" ]
[ 34, 49, 13, 41 ]
[ "passage: TAGS\n#task_categories-text-to-speech #language-Ukrainian #license-apache-2.0 #region-us \n# 🇺🇦 Open Source Ukrainian Text-to-Speech dataset named OLEKSA\n\nJoin Ukrainian community - https://t.me/speech_synthesis_uk\n\nMore details about this dataset - URL# Voice OLEKSA (male)\n\nThe voice of: URL## Features\n\n- Quality: high\n- Duration: 6h\n- Audio formats: OPUS\n- Text format: JSONL (a 'URL' file)\n- Frequency: 48000 Hz" ]
1f6f964409b1c40669b05e29e917ba792590d075
# Dataset Card for Evaluation run of kyujinpy/SOLAR-Platypus-10.7B-v1 <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [kyujinpy/SOLAR-Platypus-10.7B-v1](https://huggingface.co/kyujinpy/SOLAR-Platypus-10.7B-v1) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_kyujinpy__SOLAR-Platypus-10.7B-v1", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-12-16T16:18:16.203947](https://huggingface.co/datasets/open-llm-leaderboard/details_kyujinpy__SOLAR-Platypus-10.7B-v1/blob/main/results_2023-12-16T16-18-16.203947.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.5995716192292146, "acc_stderr": 0.03274801514976459, "acc_norm": 0.6080034028429626, "acc_norm_stderr": 0.033508703676958934, "mc1": 0.35006119951040393, "mc1_stderr": 0.01669794942015103, "mc2": 0.5157940312549367, "mc2_stderr": 0.01467999948196073 }, "harness|arc:challenge|25": { "acc": 0.5784982935153583, "acc_stderr": 0.014430197069326023, "acc_norm": 0.6168941979522184, "acc_norm_stderr": 0.014206472661672877 }, "harness|hellaswag|10": { "acc": 0.6436964748058156, "acc_stderr": 0.004779276329704051, "acc_norm": 0.8422624975104561, "acc_norm_stderr": 0.003637497708934033 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.32, "acc_stderr": 0.046882617226215034, "acc_norm": 0.32, "acc_norm_stderr": 0.046882617226215034 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.6074074074074074, "acc_stderr": 0.0421850621536888, "acc_norm": 0.6074074074074074, "acc_norm_stderr": 0.0421850621536888 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.6447368421052632, "acc_stderr": 0.03894734487013317, "acc_norm": 0.6447368421052632, "acc_norm_stderr": 0.03894734487013317 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.67, "acc_stderr": 0.047258156262526066, "acc_norm": 0.67, "acc_norm_stderr": 0.047258156262526066 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.6490566037735849, "acc_stderr": 0.02937364625323469, "acc_norm": 0.6490566037735849, "acc_norm_stderr": 0.02937364625323469 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.7152777777777778, "acc_stderr": 0.03773809990686934, "acc_norm": 0.7152777777777778, "acc_norm_stderr": 0.03773809990686934 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.32, "acc_stderr": 0.046882617226215034, "acc_norm": 0.32, "acc_norm_stderr": 0.046882617226215034 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.49, "acc_stderr": 0.05024183937956911, "acc_norm": 0.49, "acc_norm_stderr": 0.05024183937956911 }, 
"harness|hendrycksTest-college_mathematics|5": { "acc": 0.35, "acc_stderr": 0.047937248544110196, "acc_norm": 0.35, "acc_norm_stderr": 0.047937248544110196 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.6127167630057804, "acc_stderr": 0.03714325906302064, "acc_norm": 0.6127167630057804, "acc_norm_stderr": 0.03714325906302064 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.30392156862745096, "acc_stderr": 0.04576665403207762, "acc_norm": 0.30392156862745096, "acc_norm_stderr": 0.04576665403207762 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.76, "acc_stderr": 0.04292346959909281, "acc_norm": 0.76, "acc_norm_stderr": 0.04292346959909281 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.5234042553191489, "acc_stderr": 0.03265019475033582, "acc_norm": 0.5234042553191489, "acc_norm_stderr": 0.03265019475033582 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.42105263157894735, "acc_stderr": 0.046446020912223177, "acc_norm": 0.42105263157894735, "acc_norm_stderr": 0.046446020912223177 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5379310344827586, "acc_stderr": 0.041546596717075474, "acc_norm": 0.5379310344827586, "acc_norm_stderr": 0.041546596717075474 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.42328042328042326, "acc_stderr": 0.025446365634406772, "acc_norm": 0.42328042328042326, "acc_norm_stderr": 0.025446365634406772 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.42063492063492064, "acc_stderr": 0.04415438226743744, "acc_norm": 0.42063492063492064, "acc_norm_stderr": 0.04415438226743744 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.35, "acc_stderr": 0.0479372485441102, "acc_norm": 0.35, "acc_norm_stderr": 0.0479372485441102 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.7193548387096774, "acc_stderr": 0.025560604721022884, "acc_norm": 0.7193548387096774, "acc_norm_stderr": 0.025560604721022884 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.4187192118226601, "acc_stderr": 0.034711928605184676, "acc_norm": 0.4187192118226601, "acc_norm_stderr": 0.034711928605184676 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.7, "acc_stderr": 0.046056618647183814, "acc_norm": 0.7, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.793939393939394, "acc_stderr": 0.03158415324047711, "acc_norm": 0.793939393939394, "acc_norm_stderr": 0.03158415324047711 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.7777777777777778, "acc_stderr": 0.029620227874790486, "acc_norm": 0.7777777777777778, "acc_norm_stderr": 0.029620227874790486 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.844559585492228, "acc_stderr": 0.0261484834691533, "acc_norm": 0.844559585492228, "acc_norm_stderr": 0.0261484834691533 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.6025641025641025, "acc_stderr": 0.024811920017903836, "acc_norm": 0.6025641025641025, "acc_norm_stderr": 0.024811920017903836 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.3, "acc_stderr": 0.027940457136228402, "acc_norm": 0.3, "acc_norm_stderr": 0.027940457136228402 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.5672268907563025, "acc_stderr": 0.032183581077426124, "acc_norm": 0.5672268907563025, "acc_norm_stderr": 0.032183581077426124 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.3708609271523179, "acc_stderr": 0.03943966699183629, "acc_norm": 
0.3708609271523179, "acc_norm_stderr": 0.03943966699183629 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.7926605504587156, "acc_stderr": 0.017381415563608678, "acc_norm": 0.7926605504587156, "acc_norm_stderr": 0.017381415563608678 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.42592592592592593, "acc_stderr": 0.03372343271653063, "acc_norm": 0.42592592592592593, "acc_norm_stderr": 0.03372343271653063 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.8333333333333334, "acc_stderr": 0.02615686752393104, "acc_norm": 0.8333333333333334, "acc_norm_stderr": 0.02615686752393104 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.8227848101265823, "acc_stderr": 0.024856364184503224, "acc_norm": 0.8227848101265823, "acc_norm_stderr": 0.024856364184503224 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.6771300448430493, "acc_stderr": 0.03138147637575498, "acc_norm": 0.6771300448430493, "acc_norm_stderr": 0.03138147637575498 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.648854961832061, "acc_stderr": 0.04186445163013751, "acc_norm": 0.648854961832061, "acc_norm_stderr": 0.04186445163013751 }, "harness|hendrycksTest-international_law|5": { "acc": 0.6694214876033058, "acc_stderr": 0.04294340845212094, "acc_norm": 0.6694214876033058, "acc_norm_stderr": 0.04294340845212094 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.7314814814814815, "acc_stderr": 0.042844679680521934, "acc_norm": 0.7314814814814815, "acc_norm_stderr": 0.042844679680521934 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.6748466257668712, "acc_stderr": 0.036803503712864595, "acc_norm": 0.6748466257668712, "acc_norm_stderr": 0.036803503712864595 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.42857142857142855, "acc_stderr": 0.04697113923010212, "acc_norm": 0.42857142857142855, "acc_norm_stderr": 0.04697113923010212 }, "harness|hendrycksTest-management|5": { "acc": 0.7572815533980582, "acc_stderr": 0.042450224863844935, "acc_norm": 0.7572815533980582, "acc_norm_stderr": 0.042450224863844935 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8418803418803419, "acc_stderr": 0.02390232554956039, "acc_norm": 0.8418803418803419, "acc_norm_stderr": 0.02390232554956039 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.73, "acc_stderr": 0.044619604333847394, "acc_norm": 0.73, "acc_norm_stderr": 0.044619604333847394 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.8109833971902938, "acc_stderr": 0.014000791294407004, "acc_norm": 0.8109833971902938, "acc_norm_stderr": 0.014000791294407004 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.6213872832369942, "acc_stderr": 0.02611374936131034, "acc_norm": 0.6213872832369942, "acc_norm_stderr": 0.02611374936131034 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.2737430167597765, "acc_stderr": 0.014912413096372435, "acc_norm": 0.2737430167597765, "acc_norm_stderr": 0.014912413096372435 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.6601307189542484, "acc_stderr": 0.027121956071388856, "acc_norm": 0.6601307189542484, "acc_norm_stderr": 0.027121956071388856 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.6591639871382636, "acc_stderr": 0.026920841260776165, "acc_norm": 0.6591639871382636, "acc_norm_stderr": 0.026920841260776165 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.7283950617283951, "acc_stderr": 0.024748624490537382, "acc_norm": 0.7283950617283951, "acc_norm_stderr": 0.024748624490537382 }, "harness|hendrycksTest-professional_accounting|5": { 
"acc": 0.4645390070921986, "acc_stderr": 0.02975238965742705, "acc_norm": 0.4645390070921986, "acc_norm_stderr": 0.02975238965742705 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.42242503259452413, "acc_stderr": 0.01261560047573492, "acc_norm": 0.42242503259452413, "acc_norm_stderr": 0.01261560047573492 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.5845588235294118, "acc_stderr": 0.029935342707877746, "acc_norm": 0.5845588235294118, "acc_norm_stderr": 0.029935342707877746 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.6045751633986928, "acc_stderr": 0.019780465954777515, "acc_norm": 0.6045751633986928, "acc_norm_stderr": 0.019780465954777515 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6727272727272727, "acc_stderr": 0.0449429086625209, "acc_norm": 0.6727272727272727, "acc_norm_stderr": 0.0449429086625209 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.6816326530612244, "acc_stderr": 0.02982253379398208, "acc_norm": 0.6816326530612244, "acc_norm_stderr": 0.02982253379398208 }, "harness|hendrycksTest-sociology|5": { "acc": 0.8109452736318408, "acc_stderr": 0.02768691358801302, "acc_norm": 0.8109452736318408, "acc_norm_stderr": 0.02768691358801302 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.85, "acc_stderr": 0.035887028128263686, "acc_norm": 0.85, "acc_norm_stderr": 0.035887028128263686 }, "harness|hendrycksTest-virology|5": { "acc": 0.5180722891566265, "acc_stderr": 0.03889951252827216, "acc_norm": 0.5180722891566265, "acc_norm_stderr": 0.03889951252827216 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8011695906432749, "acc_stderr": 0.030611116557432528, "acc_norm": 0.8011695906432749, "acc_norm_stderr": 0.030611116557432528 }, "harness|truthfulqa:mc|0": { "mc1": 0.35006119951040393, "mc1_stderr": 0.01669794942015103, "mc2": 0.5157940312549367, "mc2_stderr": 0.01467999948196073 }, "harness|winogrande|5": { "acc": 0.8279400157853196, "acc_stderr": 0.010607731615247007 }, "harness|gsm8k|5": { "acc": 0.1106899166034875, "acc_stderr": 0.008642172551392492 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). 
--> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
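As a complement to the loading snippet in the card above, a short sketch (illustrative, not part of the original card) for reading the aggregated "results" configuration; it assumes the "latest" split naming that these evaluation-details datasets use for the newest run:

```python
from datasets import load_dataset

# Assumption: the "results" config exposes a "latest" split pointing at the
# newest run, mirroring the per-task configs described in the card.
results = load_dataset(
    "open-llm-leaderboard/details_kyujinpy__SOLAR-Platypus-10.7B-v1",
    "results",
    split="latest",
)

# Each row holds the aggregated metrics of one run (e.g. the "all" averages).
print(results[0])
```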
open-llm-leaderboard/details_kyujinpy__SOLAR-Platypus-10.7B-v1
[ "region:us" ]
2023-12-16T16:21:11+00:00
{"pretty_name": "Evaluation run of kyujinpy/SOLAR-Platypus-10.7B-v1", "dataset_summary": "Dataset automatically created during the evaluation run of model [kyujinpy/SOLAR-Platypus-10.7B-v1](https://huggingface.co/kyujinpy/SOLAR-Platypus-10.7B-v1) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_kyujinpy__SOLAR-Platypus-10.7B-v1\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-12-16T16:18:16.203947](https://huggingface.co/datasets/open-llm-leaderboard/details_kyujinpy__SOLAR-Platypus-10.7B-v1/blob/main/results_2023-12-16T16-18-16.203947.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.5995716192292146,\n \"acc_stderr\": 0.03274801514976459,\n \"acc_norm\": 0.6080034028429626,\n \"acc_norm_stderr\": 0.033508703676958934,\n \"mc1\": 0.35006119951040393,\n \"mc1_stderr\": 0.01669794942015103,\n \"mc2\": 0.5157940312549367,\n \"mc2_stderr\": 0.01467999948196073\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.5784982935153583,\n \"acc_stderr\": 0.014430197069326023,\n \"acc_norm\": 0.6168941979522184,\n \"acc_norm_stderr\": 0.014206472661672877\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6436964748058156,\n \"acc_stderr\": 0.004779276329704051,\n \"acc_norm\": 0.8422624975104561,\n \"acc_norm_stderr\": 0.003637497708934033\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.32,\n \"acc_stderr\": 0.046882617226215034,\n \"acc_norm\": 0.32,\n \"acc_norm_stderr\": 0.046882617226215034\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6074074074074074,\n \"acc_stderr\": 0.0421850621536888,\n \"acc_norm\": 0.6074074074074074,\n \"acc_norm_stderr\": 0.0421850621536888\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.6447368421052632,\n \"acc_stderr\": 0.03894734487013317,\n \"acc_norm\": 0.6447368421052632,\n \"acc_norm_stderr\": 0.03894734487013317\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.67,\n \"acc_stderr\": 0.047258156262526066,\n \"acc_norm\": 0.67,\n \"acc_norm_stderr\": 0.047258156262526066\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.6490566037735849,\n \"acc_stderr\": 0.02937364625323469,\n \"acc_norm\": 0.6490566037735849,\n \"acc_norm_stderr\": 0.02937364625323469\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7152777777777778,\n \"acc_stderr\": 0.03773809990686934,\n \"acc_norm\": 0.7152777777777778,\n \"acc_norm_stderr\": 0.03773809990686934\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.32,\n \"acc_stderr\": 
0.046882617226215034,\n \"acc_norm\": 0.32,\n \"acc_norm_stderr\": 0.046882617226215034\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.49,\n \"acc_stderr\": 0.05024183937956911,\n \"acc_norm\": 0.49,\n \"acc_norm_stderr\": 0.05024183937956911\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.35,\n \"acc_stderr\": 0.047937248544110196,\n \"acc_norm\": 0.35,\n \"acc_norm_stderr\": 0.047937248544110196\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6127167630057804,\n \"acc_stderr\": 0.03714325906302064,\n \"acc_norm\": 0.6127167630057804,\n \"acc_norm_stderr\": 0.03714325906302064\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.30392156862745096,\n \"acc_stderr\": 0.04576665403207762,\n \"acc_norm\": 0.30392156862745096,\n \"acc_norm_stderr\": 0.04576665403207762\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.76,\n \"acc_stderr\": 0.04292346959909281,\n \"acc_norm\": 0.76,\n \"acc_norm_stderr\": 0.04292346959909281\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.5234042553191489,\n \"acc_stderr\": 0.03265019475033582,\n \"acc_norm\": 0.5234042553191489,\n \"acc_norm_stderr\": 0.03265019475033582\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.42105263157894735,\n \"acc_stderr\": 0.046446020912223177,\n \"acc_norm\": 0.42105263157894735,\n \"acc_norm_stderr\": 0.046446020912223177\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5379310344827586,\n \"acc_stderr\": 0.041546596717075474,\n \"acc_norm\": 0.5379310344827586,\n \"acc_norm_stderr\": 0.041546596717075474\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.42328042328042326,\n \"acc_stderr\": 0.025446365634406772,\n \"acc_norm\": 0.42328042328042326,\n \"acc_norm_stderr\": 0.025446365634406772\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.42063492063492064,\n \"acc_stderr\": 0.04415438226743744,\n \"acc_norm\": 0.42063492063492064,\n \"acc_norm_stderr\": 0.04415438226743744\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.35,\n \"acc_stderr\": 0.0479372485441102,\n \"acc_norm\": 0.35,\n \"acc_norm_stderr\": 0.0479372485441102\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7193548387096774,\n \"acc_stderr\": 0.025560604721022884,\n \"acc_norm\": 0.7193548387096774,\n \"acc_norm_stderr\": 0.025560604721022884\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.4187192118226601,\n \"acc_stderr\": 0.034711928605184676,\n \"acc_norm\": 0.4187192118226601,\n \"acc_norm_stderr\": 0.034711928605184676\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.7,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.7,\n \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.793939393939394,\n \"acc_stderr\": 0.03158415324047711,\n \"acc_norm\": 0.793939393939394,\n \"acc_norm_stderr\": 0.03158415324047711\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.7777777777777778,\n \"acc_stderr\": 0.029620227874790486,\n \"acc_norm\": 0.7777777777777778,\n \"acc_norm_stderr\": 0.029620227874790486\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.844559585492228,\n \"acc_stderr\": 0.0261484834691533,\n \"acc_norm\": 0.844559585492228,\n \"acc_norm_stderr\": 0.0261484834691533\n },\n 
\"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.6025641025641025,\n \"acc_stderr\": 0.024811920017903836,\n \"acc_norm\": 0.6025641025641025,\n \"acc_norm_stderr\": 0.024811920017903836\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.3,\n \"acc_stderr\": 0.027940457136228402,\n \"acc_norm\": 0.3,\n \"acc_norm_stderr\": 0.027940457136228402\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.5672268907563025,\n \"acc_stderr\": 0.032183581077426124,\n \"acc_norm\": 0.5672268907563025,\n \"acc_norm_stderr\": 0.032183581077426124\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.3708609271523179,\n \"acc_stderr\": 0.03943966699183629,\n \"acc_norm\": 0.3708609271523179,\n \"acc_norm_stderr\": 0.03943966699183629\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.7926605504587156,\n \"acc_stderr\": 0.017381415563608678,\n \"acc_norm\": 0.7926605504587156,\n \"acc_norm_stderr\": 0.017381415563608678\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.42592592592592593,\n \"acc_stderr\": 0.03372343271653063,\n \"acc_norm\": 0.42592592592592593,\n \"acc_norm_stderr\": 0.03372343271653063\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.8333333333333334,\n \"acc_stderr\": 0.02615686752393104,\n \"acc_norm\": 0.8333333333333334,\n \"acc_norm_stderr\": 0.02615686752393104\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.8227848101265823,\n \"acc_stderr\": 0.024856364184503224,\n \"acc_norm\": 0.8227848101265823,\n \"acc_norm_stderr\": 0.024856364184503224\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6771300448430493,\n \"acc_stderr\": 0.03138147637575498,\n \"acc_norm\": 0.6771300448430493,\n \"acc_norm_stderr\": 0.03138147637575498\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.648854961832061,\n \"acc_stderr\": 0.04186445163013751,\n \"acc_norm\": 0.648854961832061,\n \"acc_norm_stderr\": 0.04186445163013751\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.6694214876033058,\n \"acc_stderr\": 0.04294340845212094,\n \"acc_norm\": 0.6694214876033058,\n \"acc_norm_stderr\": 0.04294340845212094\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7314814814814815,\n \"acc_stderr\": 0.042844679680521934,\n \"acc_norm\": 0.7314814814814815,\n \"acc_norm_stderr\": 0.042844679680521934\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.6748466257668712,\n \"acc_stderr\": 0.036803503712864595,\n \"acc_norm\": 0.6748466257668712,\n \"acc_norm_stderr\": 0.036803503712864595\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.42857142857142855,\n \"acc_stderr\": 0.04697113923010212,\n \"acc_norm\": 0.42857142857142855,\n \"acc_norm_stderr\": 0.04697113923010212\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7572815533980582,\n \"acc_stderr\": 0.042450224863844935,\n \"acc_norm\": 0.7572815533980582,\n \"acc_norm_stderr\": 0.042450224863844935\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8418803418803419,\n \"acc_stderr\": 0.02390232554956039,\n \"acc_norm\": 0.8418803418803419,\n \"acc_norm_stderr\": 0.02390232554956039\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.73,\n \"acc_stderr\": 0.044619604333847394,\n \"acc_norm\": 0.73,\n \"acc_norm_stderr\": 0.044619604333847394\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.8109833971902938,\n 
\"acc_stderr\": 0.014000791294407004,\n \"acc_norm\": 0.8109833971902938,\n \"acc_norm_stderr\": 0.014000791294407004\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.6213872832369942,\n \"acc_stderr\": 0.02611374936131034,\n \"acc_norm\": 0.6213872832369942,\n \"acc_norm_stderr\": 0.02611374936131034\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.2737430167597765,\n \"acc_stderr\": 0.014912413096372435,\n \"acc_norm\": 0.2737430167597765,\n \"acc_norm_stderr\": 0.014912413096372435\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.6601307189542484,\n \"acc_stderr\": 0.027121956071388856,\n \"acc_norm\": 0.6601307189542484,\n \"acc_norm_stderr\": 0.027121956071388856\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.6591639871382636,\n \"acc_stderr\": 0.026920841260776165,\n \"acc_norm\": 0.6591639871382636,\n \"acc_norm_stderr\": 0.026920841260776165\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.7283950617283951,\n \"acc_stderr\": 0.024748624490537382,\n \"acc_norm\": 0.7283950617283951,\n \"acc_norm_stderr\": 0.024748624490537382\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.4645390070921986,\n \"acc_stderr\": 0.02975238965742705,\n \"acc_norm\": 0.4645390070921986,\n \"acc_norm_stderr\": 0.02975238965742705\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.42242503259452413,\n \"acc_stderr\": 0.01261560047573492,\n \"acc_norm\": 0.42242503259452413,\n \"acc_norm_stderr\": 0.01261560047573492\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.5845588235294118,\n \"acc_stderr\": 0.029935342707877746,\n \"acc_norm\": 0.5845588235294118,\n \"acc_norm_stderr\": 0.029935342707877746\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6045751633986928,\n \"acc_stderr\": 0.019780465954777515,\n \"acc_norm\": 0.6045751633986928,\n \"acc_norm_stderr\": 0.019780465954777515\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6727272727272727,\n \"acc_stderr\": 0.0449429086625209,\n \"acc_norm\": 0.6727272727272727,\n \"acc_norm_stderr\": 0.0449429086625209\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.6816326530612244,\n \"acc_stderr\": 0.02982253379398208,\n \"acc_norm\": 0.6816326530612244,\n \"acc_norm_stderr\": 0.02982253379398208\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8109452736318408,\n \"acc_stderr\": 0.02768691358801302,\n \"acc_norm\": 0.8109452736318408,\n \"acc_norm_stderr\": 0.02768691358801302\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.85,\n \"acc_stderr\": 0.035887028128263686,\n \"acc_norm\": 0.85,\n \"acc_norm_stderr\": 0.035887028128263686\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5180722891566265,\n \"acc_stderr\": 0.03889951252827216,\n \"acc_norm\": 0.5180722891566265,\n \"acc_norm_stderr\": 0.03889951252827216\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8011695906432749,\n \"acc_stderr\": 0.030611116557432528,\n \"acc_norm\": 0.8011695906432749,\n \"acc_norm_stderr\": 0.030611116557432528\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.35006119951040393,\n \"mc1_stderr\": 0.01669794942015103,\n \"mc2\": 0.5157940312549367,\n \"mc2_stderr\": 0.01467999948196073\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.8279400157853196,\n \"acc_stderr\": 0.010607731615247007\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.1106899166034875,\n \"acc_stderr\": 0.008642172551392492\n }\n}\n```", 
"repo_url": "https://huggingface.co/kyujinpy/SOLAR-Platypus-10.7B-v1", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_12_16T16_18_16.203947", "path": ["**/details_harness|arc:challenge|25_2023-12-16T16-18-16.203947.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-12-16T16-18-16.203947.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_12_16T16_18_16.203947", "path": ["**/details_harness|gsm8k|5_2023-12-16T16-18-16.203947.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-12-16T16-18-16.203947.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_12_16T16_18_16.203947", "path": ["**/details_harness|hellaswag|10_2023-12-16T16-18-16.203947.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-12-16T16-18-16.203947.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_12_16T16_18_16.203947", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T16-18-16.203947.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-12-16T16-18-16.203947.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-12-16T16-18-16.203947.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T16-18-16.203947.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T16-18-16.203947.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-12-16T16-18-16.203947.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T16-18-16.203947.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T16-18-16.203947.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T16-18-16.203947.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T16-18-16.203947.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-12-16T16-18-16.203947.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-12-16T16-18-16.203947.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T16-18-16.203947.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-12-16T16-18-16.203947.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T16-18-16.203947.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T16-18-16.203947.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T16-18-16.203947.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-12-16T16-18-16.203947.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T16-18-16.203947.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T16-18-16.203947.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T16-18-16.203947.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T16-18-16.203947.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T16-18-16.203947.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T16-18-16.203947.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T16-18-16.203947.parquet", 
"**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T16-18-16.203947.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T16-18-16.203947.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T16-18-16.203947.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T16-18-16.203947.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T16-18-16.203947.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T16-18-16.203947.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T16-18-16.203947.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-12-16T16-18-16.203947.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T16-18-16.203947.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-12-16T16-18-16.203947.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T16-18-16.203947.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T16-18-16.203947.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T16-18-16.203947.parquet", "**/details_harness|hendrycksTest-management|5_2023-12-16T16-18-16.203947.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-12-16T16-18-16.203947.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T16-18-16.203947.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T16-18-16.203947.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T16-18-16.203947.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T16-18-16.203947.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-12-16T16-18-16.203947.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-12-16T16-18-16.203947.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-12-16T16-18-16.203947.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T16-18-16.203947.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-12-16T16-18-16.203947.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T16-18-16.203947.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T16-18-16.203947.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-12-16T16-18-16.203947.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-12-16T16-18-16.203947.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-12-16T16-18-16.203947.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T16-18-16.203947.parquet", "**/details_harness|hendrycksTest-virology|5_2023-12-16T16-18-16.203947.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-12-16T16-18-16.203947.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T16-18-16.203947.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-12-16T16-18-16.203947.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-12-16T16-18-16.203947.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T16-18-16.203947.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T16-18-16.203947.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-12-16T16-18-16.203947.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T16-18-16.203947.parquet", 
"**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T16-18-16.203947.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T16-18-16.203947.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T16-18-16.203947.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-12-16T16-18-16.203947.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-12-16T16-18-16.203947.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T16-18-16.203947.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-12-16T16-18-16.203947.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T16-18-16.203947.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T16-18-16.203947.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T16-18-16.203947.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-12-16T16-18-16.203947.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T16-18-16.203947.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T16-18-16.203947.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T16-18-16.203947.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T16-18-16.203947.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T16-18-16.203947.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T16-18-16.203947.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T16-18-16.203947.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T16-18-16.203947.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T16-18-16.203947.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T16-18-16.203947.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T16-18-16.203947.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T16-18-16.203947.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T16-18-16.203947.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T16-18-16.203947.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-12-16T16-18-16.203947.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T16-18-16.203947.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-12-16T16-18-16.203947.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T16-18-16.203947.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T16-18-16.203947.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T16-18-16.203947.parquet", "**/details_harness|hendrycksTest-management|5_2023-12-16T16-18-16.203947.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-12-16T16-18-16.203947.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T16-18-16.203947.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T16-18-16.203947.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T16-18-16.203947.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T16-18-16.203947.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-12-16T16-18-16.203947.parquet", 
"**/details_harness|hendrycksTest-philosophy|5_2023-12-16T16-18-16.203947.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-12-16T16-18-16.203947.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T16-18-16.203947.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-12-16T16-18-16.203947.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T16-18-16.203947.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T16-18-16.203947.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-12-16T16-18-16.203947.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-12-16T16-18-16.203947.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-12-16T16-18-16.203947.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T16-18-16.203947.parquet", "**/details_harness|hendrycksTest-virology|5_2023-12-16T16-18-16.203947.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-12-16T16-18-16.203947.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_12_16T16_18_16.203947", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T16-18-16.203947.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T16-18-16.203947.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_12_16T16_18_16.203947", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-12-16T16-18-16.203947.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-12-16T16-18-16.203947.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_12_16T16_18_16.203947", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-12-16T16-18-16.203947.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-12-16T16-18-16.203947.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_12_16T16_18_16.203947", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T16-18-16.203947.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T16-18-16.203947.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_12_16T16_18_16.203947", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T16-18-16.203947.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T16-18-16.203947.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_12_16T16_18_16.203947", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-12-16T16-18-16.203947.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-12-16T16-18-16.203947.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_12_16T16_18_16.203947", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T16-18-16.203947.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T16-18-16.203947.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_12_16T16_18_16.203947", "path": 
["**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T16-18-16.203947.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T16-18-16.203947.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_12_16T16_18_16.203947", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T16-18-16.203947.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T16-18-16.203947.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_12_16T16_18_16.203947", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T16-18-16.203947.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T16-18-16.203947.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_12_16T16_18_16.203947", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-12-16T16-18-16.203947.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-12-16T16-18-16.203947.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_12_16T16_18_16.203947", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-12-16T16-18-16.203947.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-12-16T16-18-16.203947.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_12_16T16_18_16.203947", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T16-18-16.203947.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T16-18-16.203947.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_12_16T16_18_16.203947", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-12-16T16-18-16.203947.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-12-16T16-18-16.203947.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_12_16T16_18_16.203947", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T16-18-16.203947.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T16-18-16.203947.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_12_16T16_18_16.203947", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T16-18-16.203947.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T16-18-16.203947.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_12_16T16_18_16.203947", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T16-18-16.203947.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T16-18-16.203947.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_12_16T16_18_16.203947", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-12-16T16-18-16.203947.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-global_facts|5_2023-12-16T16-18-16.203947.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_12_16T16_18_16.203947", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T16-18-16.203947.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T16-18-16.203947.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_12_16T16_18_16.203947", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T16-18-16.203947.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T16-18-16.203947.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_12_16T16_18_16.203947", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T16-18-16.203947.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T16-18-16.203947.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_12_16T16_18_16.203947", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T16-18-16.203947.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T16-18-16.203947.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_12_16T16_18_16.203947", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T16-18-16.203947.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T16-18-16.203947.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_12_16T16_18_16.203947", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T16-18-16.203947.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T16-18-16.203947.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_12_16T16_18_16.203947", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T16-18-16.203947.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T16-18-16.203947.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_12_16T16_18_16.203947", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T16-18-16.203947.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T16-18-16.203947.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_12_16T16_18_16.203947", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T16-18-16.203947.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T16-18-16.203947.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_12_16T16_18_16.203947", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T16-18-16.203947.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T16-18-16.203947.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_12_16T16_18_16.203947", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T16-18-16.203947.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T16-18-16.203947.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_12_16T16_18_16.203947", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T16-18-16.203947.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T16-18-16.203947.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_12_16T16_18_16.203947", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T16-18-16.203947.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T16-18-16.203947.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_12_16T16_18_16.203947", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T16-18-16.203947.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T16-18-16.203947.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_12_16T16_18_16.203947", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-12-16T16-18-16.203947.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-12-16T16-18-16.203947.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_12_16T16_18_16.203947", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T16-18-16.203947.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T16-18-16.203947.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_12_16T16_18_16.203947", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-12-16T16-18-16.203947.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-12-16T16-18-16.203947.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_12_16T16_18_16.203947", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T16-18-16.203947.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T16-18-16.203947.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_12_16T16_18_16.203947", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T16-18-16.203947.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T16-18-16.203947.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_12_16T16_18_16.203947", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T16-18-16.203947.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T16-18-16.203947.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_12_16T16_18_16.203947", "path": ["**/details_harness|hendrycksTest-management|5_2023-12-16T16-18-16.203947.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-12-16T16-18-16.203947.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_12_16T16_18_16.203947", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-12-16T16-18-16.203947.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-12-16T16-18-16.203947.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_12_16T16_18_16.203947", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T16-18-16.203947.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T16-18-16.203947.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_12_16T16_18_16.203947", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T16-18-16.203947.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T16-18-16.203947.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_12_16T16_18_16.203947", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T16-18-16.203947.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T16-18-16.203947.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_12_16T16_18_16.203947", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T16-18-16.203947.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T16-18-16.203947.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_12_16T16_18_16.203947", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-12-16T16-18-16.203947.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-12-16T16-18-16.203947.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_12_16T16_18_16.203947", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-12-16T16-18-16.203947.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-12-16T16-18-16.203947.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_12_16T16_18_16.203947", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-12-16T16-18-16.203947.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-12-16T16-18-16.203947.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_12_16T16_18_16.203947", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T16-18-16.203947.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T16-18-16.203947.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_12_16T16_18_16.203947", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2023-12-16T16-18-16.203947.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-12-16T16-18-16.203947.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_12_16T16_18_16.203947", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T16-18-16.203947.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T16-18-16.203947.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_12_16T16_18_16.203947", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T16-18-16.203947.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T16-18-16.203947.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_12_16T16_18_16.203947", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-12-16T16-18-16.203947.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-12-16T16-18-16.203947.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_12_16T16_18_16.203947", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-12-16T16-18-16.203947.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-12-16T16-18-16.203947.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_12_16T16_18_16.203947", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-12-16T16-18-16.203947.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-12-16T16-18-16.203947.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_12_16T16_18_16.203947", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T16-18-16.203947.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T16-18-16.203947.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_12_16T16_18_16.203947", "path": ["**/details_harness|hendrycksTest-virology|5_2023-12-16T16-18-16.203947.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-12-16T16-18-16.203947.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_12_16T16_18_16.203947", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-12-16T16-18-16.203947.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-12-16T16-18-16.203947.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_12_16T16_18_16.203947", "path": ["**/details_harness|truthfulqa:mc|0_2023-12-16T16-18-16.203947.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-12-16T16-18-16.203947.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_12_16T16_18_16.203947", "path": ["**/details_harness|winogrande|5_2023-12-16T16-18-16.203947.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-12-16T16-18-16.203947.parquet"]}]}, {"config_name": "results", "data_files": [{"split": 
"2023_12_16T16_18_16.203947", "path": ["results_2023-12-16T16-18-16.203947.parquet"]}, {"split": "latest", "path": ["results_2023-12-16T16-18-16.203947.parquet"]}]}]}
2023-12-16T16:22:15+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of kyujinpy/SOLAR-Platypus-10.7B-v1 Dataset automatically created during the evaluation run of model kyujinpy/SOLAR-Platypus-10.7B-v1 on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-12-16T16:18:16.203947 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
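The flattened card above refers to a loading snippet without reproducing it, so here is a minimal sketch of pulling this run's details with the `datasets` library. The repository id and the "latest" split name are assumptions inferred from the naming convention and config metadata used by the other leaderboard detail datasets in this document, and should be checked against the actual dataset page.

```python
from datasets import load_dataset

# Assumed repo id, following the leaderboard's usual
# "open-llm-leaderboard/details_<org>__<model>" naming convention.
REPO = "open-llm-leaderboard/details_kyujinpy__SOLAR-Platypus-10.7B-v1"

# Per-task details: one of the 63 per-task configurations (5-shot Winogrande here).
winogrande = load_dataset(REPO, "harness_winogrande_5", split="latest")

# Aggregated metrics for the whole run are stored in the "results" configuration.
results = load_dataset(REPO, "results", split="latest")
print(results[0])
```

Per the card prose, the "train" split also points at the latest results, so `split="train"` should return the same data.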
[ "# Dataset Card for Evaluation run of kyujinpy/SOLAR-Platypus-10.7B-v1\n\n\n\nDataset automatically created during the evaluation run of model kyujinpy/SOLAR-Platypus-10.7B-v1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-12-16T16:18:16.203947(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of kyujinpy/SOLAR-Platypus-10.7B-v1\n\n\n\nDataset automatically created during the evaluation run of model kyujinpy/SOLAR-Platypus-10.7B-v1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-12-16T16:18:16.203947(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 197, 67, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of kyujinpy/SOLAR-Platypus-10.7B-v1\n\n\n\nDataset automatically created during the evaluation run of model kyujinpy/SOLAR-Platypus-10.7B-v1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-12-16T16:18:16.203947(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]" ]
f5a79113c0d2c19cb2ea1a40db6b9d105118f2ce
# 🇺🇦 Open Source Ukrainian Text-to-Speech dataset named Kateryna Join the Ukrainian community - https://t.me/speech_synthesis_uk More details about this dataset - https://github.com/egorsmkv/ukrainian-tts-datasets/tree/main/kateryna # Voice KATERYNA (female) License (dual): - For non-commercial applications: [CC-BY-NC](https://creativecommons.org/licenses/by-nc/2.0/) - For commercial applications: contact the voice talent directly using https://t.me/shalenamotion ## Features - Quality: high - Duration: 2h40m - Audio formats: OPUS - Text format: JSONL (a `metadata.jsonl` file) - Frequency: 48000 Hz
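Because the transcripts ship as a `metadata.jsonl` file, a minimal sketch for inspecting them is shown below; it assumes the file sits at the root of the `Yehor/ukrainian-tts-kateryna` dataset repository and that every line holds one standalone JSON record.

```python
import json

from huggingface_hub import hf_hub_download

# Download the transcript metadata; the filename comes from the card,
# its location at the repository root is an assumption.
path = hf_hub_download(
    repo_id="Yehor/ukrainian-tts-kateryna",
    filename="metadata.jsonl",
    repo_type="dataset",
)

# JSONL: one JSON object per line; print the first few records.
with open(path, encoding="utf-8") as f:
    for i, line in enumerate(f):
        print(json.loads(line))
        if i == 4:
            break
```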
Yehor/ukrainian-tts-kateryna
[ "task_categories:text-to-speech", "language:uk", "license:cc-by-nc-4.0", "region:us" ]
2023-12-16T16:27:34+00:00
{"language": ["uk"], "license": "cc-by-nc-4.0", "task_categories": ["text-to-speech"]}
2023-12-16T16:28:46+00:00
[]
[ "uk" ]
TAGS #task_categories-text-to-speech #language-Ukrainian #license-cc-by-nc-4.0 #region-us
# 🇺🇦 Open Source Ukrainian Text-to-Speech dataset named Kateryna Join Ukrainian community - https://t.me/speech_synthesis_uk More details about this dataset - URL # Voice KATERYNA (female) License (dual): - For non-commerical applications: CC-BY-NC - For commercial applications: contact the voice talent directly using https://t.me/shalenamotion ## Features - Quality: high - Duration: 2h40m - Audio formats: OPUS - Text format: JSONL (a 'URL' file) - Frequency: 48000 Hz
[ "# 🇺🇦 Open Source Ukrainian Text-to-Speech dataset named Kateryna\n\nJoin Ukrainian community - https://t.me/speech_synthesis_uk\n\nMore details about this dataset - URL", "# Voice KATERYNA (female)\n\nLicense (dual): \n\n- For non-commerical applications: CC-BY-NC\n- For commercial applications: contact the voice talent directly using https://t.me/shalenamotion", "## Features\n\n- Quality: high\n- Duration: 2h40m\n- Audio formats: OPUS\n- Text format: JSONL (a 'URL' file)\n- Frequency: 48000 Hz" ]
[ "TAGS\n#task_categories-text-to-speech #language-Ukrainian #license-cc-by-nc-4.0 #region-us \n", "# 🇺🇦 Open Source Ukrainian Text-to-Speech dataset named Kateryna\n\nJoin Ukrainian community - https://t.me/speech_synthesis_uk\n\nMore details about this dataset - URL", "# Voice KATERYNA (female)\n\nLicense (dual): \n\n- For non-commerical applications: CC-BY-NC\n- For commercial applications: contact the voice talent directly using https://t.me/shalenamotion", "## Features\n\n- Quality: high\n- Duration: 2h40m\n- Audio formats: OPUS\n- Text format: JSONL (a 'URL' file)\n- Frequency: 48000 Hz" ]
[ 37, 49, 50, 43 ]
[ "passage: TAGS\n#task_categories-text-to-speech #language-Ukrainian #license-cc-by-nc-4.0 #region-us \n# 🇺🇦 Open Source Ukrainian Text-to-Speech dataset named Kateryna\n\nJoin Ukrainian community - https://t.me/speech_synthesis_uk\n\nMore details about this dataset - URL# Voice KATERYNA (female)\n\nLicense (dual): \n\n- For non-commerical applications: CC-BY-NC\n- For commercial applications: contact the voice talent directly using https://t.me/shalenamotion## Features\n\n- Quality: high\n- Duration: 2h40m\n- Audio formats: OPUS\n- Text format: JSONL (a 'URL' file)\n- Frequency: 48000 Hz" ]
978062cf7d8e4c858388a7e7693b004ca7df625a
# Dataset Card for Evaluation run of ignos/LeoScorpius-GreenNode-Alpaca-7B-v1 <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [ignos/LeoScorpius-GreenNode-Alpaca-7B-v1](https://huggingface.co/ignos/LeoScorpius-GreenNode-Alpaca-7B-v1) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_ignos__LeoScorpius-GreenNode-Alpaca-7B-v1", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-12-16T16:30:42.646847](https://huggingface.co/datasets/open-llm-leaderboard/details_ignos__LeoScorpius-GreenNode-Alpaca-7B-v1/blob/main/results_2023-12-16T16-30-42.646847.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.6576641618199561, "acc_stderr": 0.03195573748726771, "acc_norm": 0.657374743909787, "acc_norm_stderr": 0.03261672779060913, "mc1": 0.554467564259486, "mc1_stderr": 0.017399335280140343, "mc2": 0.6935418249648612, "mc2_stderr": 0.015012402087814694 }, "harness|arc:challenge|25": { "acc": 0.6945392491467577, "acc_stderr": 0.013460080478002508, "acc_norm": 0.7235494880546075, "acc_norm_stderr": 0.013069662474252423 }, "harness|hellaswag|10": { "acc": 0.7094204341764588, "acc_stderr": 0.0045310191594141085, "acc_norm": 0.8815972913762199, "acc_norm_stderr": 0.0032242407223513204 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.34, "acc_stderr": 0.04760952285695235, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695235 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.6444444444444445, "acc_stderr": 0.04135176749720386, "acc_norm": 0.6444444444444445, "acc_norm_stderr": 0.04135176749720386 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.6973684210526315, "acc_stderr": 0.037385206761196686, "acc_norm": 0.6973684210526315, "acc_norm_stderr": 0.037385206761196686 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.62, "acc_stderr": 0.048783173121456316, "acc_norm": 0.62, "acc_norm_stderr": 0.048783173121456316 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.7245283018867924, "acc_stderr": 0.027495663683724057, "acc_norm": 0.7245283018867924, "acc_norm_stderr": 0.027495663683724057 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.7777777777777778, "acc_stderr": 0.03476590104304134, "acc_norm": 0.7777777777777778, "acc_norm_stderr": 0.03476590104304134 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.46, "acc_stderr": 0.05009082659620333, "acc_norm": 0.46, "acc_norm_stderr": 0.05009082659620333 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.57, "acc_stderr": 0.04975698519562428, "acc_norm": 0.57, 
"acc_norm_stderr": 0.04975698519562428 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.29, "acc_stderr": 0.04560480215720684, "acc_norm": 0.29, "acc_norm_stderr": 0.04560480215720684 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.6878612716763006, "acc_stderr": 0.035331333893236574, "acc_norm": 0.6878612716763006, "acc_norm_stderr": 0.035331333893236574 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.46078431372549017, "acc_stderr": 0.04959859966384181, "acc_norm": 0.46078431372549017, "acc_norm_stderr": 0.04959859966384181 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.77, "acc_stderr": 0.042295258468165065, "acc_norm": 0.77, "acc_norm_stderr": 0.042295258468165065 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.574468085106383, "acc_stderr": 0.03232146916224468, "acc_norm": 0.574468085106383, "acc_norm_stderr": 0.03232146916224468 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.5, "acc_stderr": 0.047036043419179864, "acc_norm": 0.5, "acc_norm_stderr": 0.047036043419179864 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5724137931034483, "acc_stderr": 0.04122737111370332, "acc_norm": 0.5724137931034483, "acc_norm_stderr": 0.04122737111370332 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.42063492063492064, "acc_stderr": 0.025424835086924, "acc_norm": 0.42063492063492064, "acc_norm_stderr": 0.025424835086924 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.4603174603174603, "acc_stderr": 0.04458029125470973, "acc_norm": 0.4603174603174603, "acc_norm_stderr": 0.04458029125470973 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.35, "acc_stderr": 0.04793724854411019, "acc_norm": 0.35, "acc_norm_stderr": 0.04793724854411019 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.7806451612903226, "acc_stderr": 0.023540799358723295, "acc_norm": 0.7806451612903226, "acc_norm_stderr": 0.023540799358723295 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.5270935960591133, "acc_stderr": 0.03512819077876106, "acc_norm": 0.5270935960591133, "acc_norm_stderr": 0.03512819077876106 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.71, "acc_stderr": 0.045604802157206845, "acc_norm": 0.71, "acc_norm_stderr": 0.045604802157206845 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.7757575757575758, "acc_stderr": 0.03256866661681102, "acc_norm": 0.7757575757575758, "acc_norm_stderr": 0.03256866661681102 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.7929292929292929, "acc_stderr": 0.028869778460267042, "acc_norm": 0.7929292929292929, "acc_norm_stderr": 0.028869778460267042 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.8963730569948186, "acc_stderr": 0.02199531196364424, "acc_norm": 0.8963730569948186, "acc_norm_stderr": 0.02199531196364424 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.6666666666666666, "acc_stderr": 0.023901157979402538, "acc_norm": 0.6666666666666666, "acc_norm_stderr": 0.023901157979402538 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.35185185185185186, "acc_stderr": 0.029116617606083008, "acc_norm": 0.35185185185185186, "acc_norm_stderr": 0.029116617606083008 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.6722689075630253, "acc_stderr": 0.03048991141767323, "acc_norm": 0.6722689075630253, "acc_norm_stderr": 0.03048991141767323 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.3443708609271523, "acc_stderr": 
0.038796870240733264, "acc_norm": 0.3443708609271523, "acc_norm_stderr": 0.038796870240733264 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8532110091743119, "acc_stderr": 0.01517314184512625, "acc_norm": 0.8532110091743119, "acc_norm_stderr": 0.01517314184512625 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.5370370370370371, "acc_stderr": 0.03400603625538272, "acc_norm": 0.5370370370370371, "acc_norm_stderr": 0.03400603625538272 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.8431372549019608, "acc_stderr": 0.02552472232455335, "acc_norm": 0.8431372549019608, "acc_norm_stderr": 0.02552472232455335 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.8059071729957806, "acc_stderr": 0.025744902532290916, "acc_norm": 0.8059071729957806, "acc_norm_stderr": 0.025744902532290916 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.695067264573991, "acc_stderr": 0.030898610882477515, "acc_norm": 0.695067264573991, "acc_norm_stderr": 0.030898610882477515 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.8244274809160306, "acc_stderr": 0.03336820338476074, "acc_norm": 0.8244274809160306, "acc_norm_stderr": 0.03336820338476074 }, "harness|hendrycksTest-international_law|5": { "acc": 0.8016528925619835, "acc_stderr": 0.03640118271990947, "acc_norm": 0.8016528925619835, "acc_norm_stderr": 0.03640118271990947 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.7962962962962963, "acc_stderr": 0.03893542518824847, "acc_norm": 0.7962962962962963, "acc_norm_stderr": 0.03893542518824847 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.7852760736196319, "acc_stderr": 0.03226219377286775, "acc_norm": 0.7852760736196319, "acc_norm_stderr": 0.03226219377286775 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.41964285714285715, "acc_stderr": 0.04684099321077106, "acc_norm": 0.41964285714285715, "acc_norm_stderr": 0.04684099321077106 }, "harness|hendrycksTest-management|5": { "acc": 0.7572815533980582, "acc_stderr": 0.04245022486384495, "acc_norm": 0.7572815533980582, "acc_norm_stderr": 0.04245022486384495 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8717948717948718, "acc_stderr": 0.02190190511507333, "acc_norm": 0.8717948717948718, "acc_norm_stderr": 0.02190190511507333 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.71, "acc_stderr": 0.045604802157206845, "acc_norm": 0.71, "acc_norm_stderr": 0.045604802157206845 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.8365261813537676, "acc_stderr": 0.013223928616741617, "acc_norm": 0.8365261813537676, "acc_norm_stderr": 0.013223928616741617 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.7456647398843931, "acc_stderr": 0.02344582627654554, "acc_norm": 0.7456647398843931, "acc_norm_stderr": 0.02344582627654554 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.47262569832402235, "acc_stderr": 0.016697420650642752, "acc_norm": 0.47262569832402235, "acc_norm_stderr": 0.016697420650642752 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.7287581699346405, "acc_stderr": 0.02545775669666788, "acc_norm": 0.7287581699346405, "acc_norm_stderr": 0.02545775669666788 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.7170418006430869, "acc_stderr": 0.02558306248998481, "acc_norm": 0.7170418006430869, "acc_norm_stderr": 0.02558306248998481 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.7469135802469136, "acc_stderr": 0.024191808600712995, "acc_norm": 0.7469135802469136, "acc_norm_stderr": 0.024191808600712995 }, 
"harness|hendrycksTest-professional_accounting|5": { "acc": 0.4858156028368794, "acc_stderr": 0.02981549448368206, "acc_norm": 0.4858156028368794, "acc_norm_stderr": 0.02981549448368206 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.47327249022164275, "acc_stderr": 0.01275197796767601, "acc_norm": 0.47327249022164275, "acc_norm_stderr": 0.01275197796767601 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.6838235294117647, "acc_stderr": 0.028245687391462937, "acc_norm": 0.6838235294117647, "acc_norm_stderr": 0.028245687391462937 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.6683006535947712, "acc_stderr": 0.01904748523936038, "acc_norm": 0.6683006535947712, "acc_norm_stderr": 0.01904748523936038 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6909090909090909, "acc_stderr": 0.044262946482000985, "acc_norm": 0.6909090909090909, "acc_norm_stderr": 0.044262946482000985 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.7224489795918367, "acc_stderr": 0.028666857790274648, "acc_norm": 0.7224489795918367, "acc_norm_stderr": 0.028666857790274648 }, "harness|hendrycksTest-sociology|5": { "acc": 0.835820895522388, "acc_stderr": 0.026193923544454115, "acc_norm": 0.835820895522388, "acc_norm_stderr": 0.026193923544454115 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.86, "acc_stderr": 0.0348735088019777, "acc_norm": 0.86, "acc_norm_stderr": 0.0348735088019777 }, "harness|hendrycksTest-virology|5": { "acc": 0.5481927710843374, "acc_stderr": 0.03874371556587953, "acc_norm": 0.5481927710843374, "acc_norm_stderr": 0.03874371556587953 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8245614035087719, "acc_stderr": 0.029170885500727665, "acc_norm": 0.8245614035087719, "acc_norm_stderr": 0.029170885500727665 }, "harness|truthfulqa:mc|0": { "mc1": 0.554467564259486, "mc1_stderr": 0.017399335280140343, "mc2": 0.6935418249648612, "mc2_stderr": 0.015012402087814694 }, "harness|winogrande|5": { "acc": 0.8232044198895028, "acc_stderr": 0.01072192328791875 }, "harness|gsm8k|5": { "acc": 0.7103866565579985, "acc_stderr": 0.01249392734865963 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. 
news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
open-llm-leaderboard/details_ignos__LeoScorpius-GreenNode-Alpaca-7B-v1
[ "region:us" ]
2023-12-16T16:33:32+00:00
{"pretty_name": "Evaluation run of ignos/LeoScorpius-GreenNode-Alpaca-7B-v1", "dataset_summary": "Dataset automatically created during the evaluation run of model [ignos/LeoScorpius-GreenNode-Alpaca-7B-v1](https://huggingface.co/ignos/LeoScorpius-GreenNode-Alpaca-7B-v1) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_ignos__LeoScorpius-GreenNode-Alpaca-7B-v1\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-12-16T16:30:42.646847](https://huggingface.co/datasets/open-llm-leaderboard/details_ignos__LeoScorpius-GreenNode-Alpaca-7B-v1/blob/main/results_2023-12-16T16-30-42.646847.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6576641618199561,\n \"acc_stderr\": 0.03195573748726771,\n \"acc_norm\": 0.657374743909787,\n \"acc_norm_stderr\": 0.03261672779060913,\n \"mc1\": 0.554467564259486,\n \"mc1_stderr\": 0.017399335280140343,\n \"mc2\": 0.6935418249648612,\n \"mc2_stderr\": 0.015012402087814694\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.6945392491467577,\n \"acc_stderr\": 0.013460080478002508,\n \"acc_norm\": 0.7235494880546075,\n \"acc_norm_stderr\": 0.013069662474252423\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.7094204341764588,\n \"acc_stderr\": 0.0045310191594141085,\n \"acc_norm\": 0.8815972913762199,\n \"acc_norm_stderr\": 0.0032242407223513204\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.34,\n \"acc_stderr\": 0.04760952285695235,\n \"acc_norm\": 0.34,\n \"acc_norm_stderr\": 0.04760952285695235\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6444444444444445,\n \"acc_stderr\": 0.04135176749720386,\n \"acc_norm\": 0.6444444444444445,\n \"acc_norm_stderr\": 0.04135176749720386\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.6973684210526315,\n \"acc_stderr\": 0.037385206761196686,\n \"acc_norm\": 0.6973684210526315,\n \"acc_norm_stderr\": 0.037385206761196686\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.62,\n \"acc_stderr\": 0.048783173121456316,\n \"acc_norm\": 0.62,\n \"acc_norm_stderr\": 0.048783173121456316\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.7245283018867924,\n \"acc_stderr\": 0.027495663683724057,\n \"acc_norm\": 0.7245283018867924,\n \"acc_norm_stderr\": 0.027495663683724057\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7777777777777778,\n \"acc_stderr\": 0.03476590104304134,\n \"acc_norm\": 0.7777777777777778,\n \"acc_norm_stderr\": 0.03476590104304134\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n 
\"acc\": 0.46,\n \"acc_stderr\": 0.05009082659620333,\n \"acc_norm\": 0.46,\n \"acc_norm_stderr\": 0.05009082659620333\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.57,\n \"acc_stderr\": 0.04975698519562428,\n \"acc_norm\": 0.57,\n \"acc_norm_stderr\": 0.04975698519562428\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.29,\n \"acc_stderr\": 0.04560480215720684,\n \"acc_norm\": 0.29,\n \"acc_norm_stderr\": 0.04560480215720684\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6878612716763006,\n \"acc_stderr\": 0.035331333893236574,\n \"acc_norm\": 0.6878612716763006,\n \"acc_norm_stderr\": 0.035331333893236574\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.46078431372549017,\n \"acc_stderr\": 0.04959859966384181,\n \"acc_norm\": 0.46078431372549017,\n \"acc_norm_stderr\": 0.04959859966384181\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.77,\n \"acc_stderr\": 0.042295258468165065,\n \"acc_norm\": 0.77,\n \"acc_norm_stderr\": 0.042295258468165065\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.574468085106383,\n \"acc_stderr\": 0.03232146916224468,\n \"acc_norm\": 0.574468085106383,\n \"acc_norm_stderr\": 0.03232146916224468\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.5,\n \"acc_stderr\": 0.047036043419179864,\n \"acc_norm\": 0.5,\n \"acc_norm_stderr\": 0.047036043419179864\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5724137931034483,\n \"acc_stderr\": 0.04122737111370332,\n \"acc_norm\": 0.5724137931034483,\n \"acc_norm_stderr\": 0.04122737111370332\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.42063492063492064,\n \"acc_stderr\": 0.025424835086924,\n \"acc_norm\": 0.42063492063492064,\n \"acc_norm_stderr\": 0.025424835086924\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.4603174603174603,\n \"acc_stderr\": 0.04458029125470973,\n \"acc_norm\": 0.4603174603174603,\n \"acc_norm_stderr\": 0.04458029125470973\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.35,\n \"acc_stderr\": 0.04793724854411019,\n \"acc_norm\": 0.35,\n \"acc_norm_stderr\": 0.04793724854411019\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7806451612903226,\n \"acc_stderr\": 0.023540799358723295,\n \"acc_norm\": 0.7806451612903226,\n \"acc_norm_stderr\": 0.023540799358723295\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.5270935960591133,\n \"acc_stderr\": 0.03512819077876106,\n \"acc_norm\": 0.5270935960591133,\n \"acc_norm_stderr\": 0.03512819077876106\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.71,\n \"acc_stderr\": 0.045604802157206845,\n \"acc_norm\": 0.71,\n \"acc_norm_stderr\": 0.045604802157206845\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7757575757575758,\n \"acc_stderr\": 0.03256866661681102,\n \"acc_norm\": 0.7757575757575758,\n \"acc_norm_stderr\": 0.03256866661681102\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.7929292929292929,\n \"acc_stderr\": 0.028869778460267042,\n \"acc_norm\": 0.7929292929292929,\n \"acc_norm_stderr\": 0.028869778460267042\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.8963730569948186,\n \"acc_stderr\": 0.02199531196364424,\n \"acc_norm\": 0.8963730569948186,\n \"acc_norm_stderr\": 0.02199531196364424\n },\n 
\"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.6666666666666666,\n \"acc_stderr\": 0.023901157979402538,\n \"acc_norm\": 0.6666666666666666,\n \"acc_norm_stderr\": 0.023901157979402538\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.35185185185185186,\n \"acc_stderr\": 0.029116617606083008,\n \"acc_norm\": 0.35185185185185186,\n \"acc_norm_stderr\": 0.029116617606083008\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.6722689075630253,\n \"acc_stderr\": 0.03048991141767323,\n \"acc_norm\": 0.6722689075630253,\n \"acc_norm_stderr\": 0.03048991141767323\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.3443708609271523,\n \"acc_stderr\": 0.038796870240733264,\n \"acc_norm\": 0.3443708609271523,\n \"acc_norm_stderr\": 0.038796870240733264\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8532110091743119,\n \"acc_stderr\": 0.01517314184512625,\n \"acc_norm\": 0.8532110091743119,\n \"acc_norm_stderr\": 0.01517314184512625\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.5370370370370371,\n \"acc_stderr\": 0.03400603625538272,\n \"acc_norm\": 0.5370370370370371,\n \"acc_norm_stderr\": 0.03400603625538272\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.8431372549019608,\n \"acc_stderr\": 0.02552472232455335,\n \"acc_norm\": 0.8431372549019608,\n \"acc_norm_stderr\": 0.02552472232455335\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.8059071729957806,\n \"acc_stderr\": 0.025744902532290916,\n \"acc_norm\": 0.8059071729957806,\n \"acc_norm_stderr\": 0.025744902532290916\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.695067264573991,\n \"acc_stderr\": 0.030898610882477515,\n \"acc_norm\": 0.695067264573991,\n \"acc_norm_stderr\": 0.030898610882477515\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.8244274809160306,\n \"acc_stderr\": 0.03336820338476074,\n \"acc_norm\": 0.8244274809160306,\n \"acc_norm_stderr\": 0.03336820338476074\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.8016528925619835,\n \"acc_stderr\": 0.03640118271990947,\n \"acc_norm\": 0.8016528925619835,\n \"acc_norm_stderr\": 0.03640118271990947\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7962962962962963,\n \"acc_stderr\": 0.03893542518824847,\n \"acc_norm\": 0.7962962962962963,\n \"acc_norm_stderr\": 0.03893542518824847\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7852760736196319,\n \"acc_stderr\": 0.03226219377286775,\n \"acc_norm\": 0.7852760736196319,\n \"acc_norm_stderr\": 0.03226219377286775\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.41964285714285715,\n \"acc_stderr\": 0.04684099321077106,\n \"acc_norm\": 0.41964285714285715,\n \"acc_norm_stderr\": 0.04684099321077106\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7572815533980582,\n \"acc_stderr\": 0.04245022486384495,\n \"acc_norm\": 0.7572815533980582,\n \"acc_norm_stderr\": 0.04245022486384495\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8717948717948718,\n \"acc_stderr\": 0.02190190511507333,\n \"acc_norm\": 0.8717948717948718,\n \"acc_norm_stderr\": 0.02190190511507333\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.71,\n \"acc_stderr\": 0.045604802157206845,\n \"acc_norm\": 0.71,\n \"acc_norm_stderr\": 0.045604802157206845\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 
0.8365261813537676,\n \"acc_stderr\": 0.013223928616741617,\n \"acc_norm\": 0.8365261813537676,\n \"acc_norm_stderr\": 0.013223928616741617\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.7456647398843931,\n \"acc_stderr\": 0.02344582627654554,\n \"acc_norm\": 0.7456647398843931,\n \"acc_norm_stderr\": 0.02344582627654554\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.47262569832402235,\n \"acc_stderr\": 0.016697420650642752,\n \"acc_norm\": 0.47262569832402235,\n \"acc_norm_stderr\": 0.016697420650642752\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.7287581699346405,\n \"acc_stderr\": 0.02545775669666788,\n \"acc_norm\": 0.7287581699346405,\n \"acc_norm_stderr\": 0.02545775669666788\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.7170418006430869,\n \"acc_stderr\": 0.02558306248998481,\n \"acc_norm\": 0.7170418006430869,\n \"acc_norm_stderr\": 0.02558306248998481\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.7469135802469136,\n \"acc_stderr\": 0.024191808600712995,\n \"acc_norm\": 0.7469135802469136,\n \"acc_norm_stderr\": 0.024191808600712995\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.4858156028368794,\n \"acc_stderr\": 0.02981549448368206,\n \"acc_norm\": 0.4858156028368794,\n \"acc_norm_stderr\": 0.02981549448368206\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.47327249022164275,\n \"acc_stderr\": 0.01275197796767601,\n \"acc_norm\": 0.47327249022164275,\n \"acc_norm_stderr\": 0.01275197796767601\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.6838235294117647,\n \"acc_stderr\": 0.028245687391462937,\n \"acc_norm\": 0.6838235294117647,\n \"acc_norm_stderr\": 0.028245687391462937\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6683006535947712,\n \"acc_stderr\": 0.01904748523936038,\n \"acc_norm\": 0.6683006535947712,\n \"acc_norm_stderr\": 0.01904748523936038\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6909090909090909,\n \"acc_stderr\": 0.044262946482000985,\n \"acc_norm\": 0.6909090909090909,\n \"acc_norm_stderr\": 0.044262946482000985\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.7224489795918367,\n \"acc_stderr\": 0.028666857790274648,\n \"acc_norm\": 0.7224489795918367,\n \"acc_norm_stderr\": 0.028666857790274648\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.835820895522388,\n \"acc_stderr\": 0.026193923544454115,\n \"acc_norm\": 0.835820895522388,\n \"acc_norm_stderr\": 0.026193923544454115\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.86,\n \"acc_stderr\": 0.0348735088019777,\n \"acc_norm\": 0.86,\n \"acc_norm_stderr\": 0.0348735088019777\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5481927710843374,\n \"acc_stderr\": 0.03874371556587953,\n \"acc_norm\": 0.5481927710843374,\n \"acc_norm_stderr\": 0.03874371556587953\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8245614035087719,\n \"acc_stderr\": 0.029170885500727665,\n \"acc_norm\": 0.8245614035087719,\n \"acc_norm_stderr\": 0.029170885500727665\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.554467564259486,\n \"mc1_stderr\": 0.017399335280140343,\n \"mc2\": 0.6935418249648612,\n \"mc2_stderr\": 0.015012402087814694\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.8232044198895028,\n \"acc_stderr\": 0.01072192328791875\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.7103866565579985,\n \"acc_stderr\": 0.01249392734865963\n 
}\n}\n```", "repo_url": "https://huggingface.co/ignos/LeoScorpius-GreenNode-Alpaca-7B-v1", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_12_16T16_30_42.646847", "path": ["**/details_harness|arc:challenge|25_2023-12-16T16-30-42.646847.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-12-16T16-30-42.646847.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_12_16T16_30_42.646847", "path": ["**/details_harness|gsm8k|5_2023-12-16T16-30-42.646847.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-12-16T16-30-42.646847.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_12_16T16_30_42.646847", "path": ["**/details_harness|hellaswag|10_2023-12-16T16-30-42.646847.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-12-16T16-30-42.646847.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_12_16T16_30_42.646847", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T16-30-42.646847.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-12-16T16-30-42.646847.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-12-16T16-30-42.646847.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T16-30-42.646847.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T16-30-42.646847.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-12-16T16-30-42.646847.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T16-30-42.646847.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T16-30-42.646847.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T16-30-42.646847.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T16-30-42.646847.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-12-16T16-30-42.646847.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-12-16T16-30-42.646847.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T16-30-42.646847.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-12-16T16-30-42.646847.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T16-30-42.646847.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T16-30-42.646847.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T16-30-42.646847.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-12-16T16-30-42.646847.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T16-30-42.646847.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T16-30-42.646847.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T16-30-42.646847.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T16-30-42.646847.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T16-30-42.646847.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T16-30-42.646847.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T16-30-42.646847.parquet", 
"**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T16-30-42.646847.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T16-30-42.646847.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T16-30-42.646847.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T16-30-42.646847.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T16-30-42.646847.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T16-30-42.646847.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T16-30-42.646847.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-12-16T16-30-42.646847.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T16-30-42.646847.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-12-16T16-30-42.646847.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T16-30-42.646847.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T16-30-42.646847.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T16-30-42.646847.parquet", "**/details_harness|hendrycksTest-management|5_2023-12-16T16-30-42.646847.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-12-16T16-30-42.646847.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T16-30-42.646847.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T16-30-42.646847.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T16-30-42.646847.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T16-30-42.646847.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-12-16T16-30-42.646847.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-12-16T16-30-42.646847.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-12-16T16-30-42.646847.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T16-30-42.646847.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-12-16T16-30-42.646847.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T16-30-42.646847.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T16-30-42.646847.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-12-16T16-30-42.646847.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-12-16T16-30-42.646847.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-12-16T16-30-42.646847.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T16-30-42.646847.parquet", "**/details_harness|hendrycksTest-virology|5_2023-12-16T16-30-42.646847.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-12-16T16-30-42.646847.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T16-30-42.646847.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-12-16T16-30-42.646847.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-12-16T16-30-42.646847.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T16-30-42.646847.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T16-30-42.646847.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-12-16T16-30-42.646847.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T16-30-42.646847.parquet", 
"**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T16-30-42.646847.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T16-30-42.646847.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T16-30-42.646847.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-12-16T16-30-42.646847.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-12-16T16-30-42.646847.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T16-30-42.646847.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-12-16T16-30-42.646847.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T16-30-42.646847.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T16-30-42.646847.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T16-30-42.646847.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-12-16T16-30-42.646847.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T16-30-42.646847.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T16-30-42.646847.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T16-30-42.646847.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T16-30-42.646847.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T16-30-42.646847.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T16-30-42.646847.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T16-30-42.646847.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T16-30-42.646847.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T16-30-42.646847.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T16-30-42.646847.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T16-30-42.646847.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T16-30-42.646847.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T16-30-42.646847.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T16-30-42.646847.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-12-16T16-30-42.646847.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T16-30-42.646847.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-12-16T16-30-42.646847.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T16-30-42.646847.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T16-30-42.646847.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T16-30-42.646847.parquet", "**/details_harness|hendrycksTest-management|5_2023-12-16T16-30-42.646847.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-12-16T16-30-42.646847.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T16-30-42.646847.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T16-30-42.646847.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T16-30-42.646847.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T16-30-42.646847.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-12-16T16-30-42.646847.parquet", 
"**/details_harness|hendrycksTest-philosophy|5_2023-12-16T16-30-42.646847.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-12-16T16-30-42.646847.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T16-30-42.646847.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-12-16T16-30-42.646847.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T16-30-42.646847.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T16-30-42.646847.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-12-16T16-30-42.646847.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-12-16T16-30-42.646847.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-12-16T16-30-42.646847.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T16-30-42.646847.parquet", "**/details_harness|hendrycksTest-virology|5_2023-12-16T16-30-42.646847.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-12-16T16-30-42.646847.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_12_16T16_30_42.646847", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T16-30-42.646847.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T16-30-42.646847.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_12_16T16_30_42.646847", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-12-16T16-30-42.646847.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-12-16T16-30-42.646847.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_12_16T16_30_42.646847", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-12-16T16-30-42.646847.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-12-16T16-30-42.646847.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_12_16T16_30_42.646847", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T16-30-42.646847.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T16-30-42.646847.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_12_16T16_30_42.646847", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T16-30-42.646847.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T16-30-42.646847.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_12_16T16_30_42.646847", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-12-16T16-30-42.646847.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-12-16T16-30-42.646847.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_12_16T16_30_42.646847", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T16-30-42.646847.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T16-30-42.646847.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_12_16T16_30_42.646847", "path": 
["**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T16-30-42.646847.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T16-30-42.646847.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_12_16T16_30_42.646847", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T16-30-42.646847.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T16-30-42.646847.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_12_16T16_30_42.646847", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T16-30-42.646847.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T16-30-42.646847.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_12_16T16_30_42.646847", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-12-16T16-30-42.646847.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-12-16T16-30-42.646847.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_12_16T16_30_42.646847", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-12-16T16-30-42.646847.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-12-16T16-30-42.646847.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_12_16T16_30_42.646847", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T16-30-42.646847.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T16-30-42.646847.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_12_16T16_30_42.646847", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-12-16T16-30-42.646847.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-12-16T16-30-42.646847.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_12_16T16_30_42.646847", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T16-30-42.646847.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T16-30-42.646847.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_12_16T16_30_42.646847", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T16-30-42.646847.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T16-30-42.646847.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_12_16T16_30_42.646847", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T16-30-42.646847.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T16-30-42.646847.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_12_16T16_30_42.646847", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-12-16T16-30-42.646847.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-global_facts|5_2023-12-16T16-30-42.646847.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_12_16T16_30_42.646847", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T16-30-42.646847.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T16-30-42.646847.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_12_16T16_30_42.646847", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T16-30-42.646847.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T16-30-42.646847.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_12_16T16_30_42.646847", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T16-30-42.646847.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T16-30-42.646847.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_12_16T16_30_42.646847", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T16-30-42.646847.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T16-30-42.646847.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_12_16T16_30_42.646847", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T16-30-42.646847.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T16-30-42.646847.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_12_16T16_30_42.646847", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T16-30-42.646847.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T16-30-42.646847.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_12_16T16_30_42.646847", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T16-30-42.646847.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T16-30-42.646847.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_12_16T16_30_42.646847", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T16-30-42.646847.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T16-30-42.646847.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_12_16T16_30_42.646847", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T16-30-42.646847.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T16-30-42.646847.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_12_16T16_30_42.646847", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T16-30-42.646847.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T16-30-42.646847.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_12_16T16_30_42.646847", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T16-30-42.646847.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T16-30-42.646847.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_12_16T16_30_42.646847", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T16-30-42.646847.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T16-30-42.646847.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_12_16T16_30_42.646847", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T16-30-42.646847.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T16-30-42.646847.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_12_16T16_30_42.646847", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T16-30-42.646847.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T16-30-42.646847.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_12_16T16_30_42.646847", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-12-16T16-30-42.646847.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-12-16T16-30-42.646847.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_12_16T16_30_42.646847", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T16-30-42.646847.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T16-30-42.646847.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_12_16T16_30_42.646847", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-12-16T16-30-42.646847.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-12-16T16-30-42.646847.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_12_16T16_30_42.646847", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T16-30-42.646847.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T16-30-42.646847.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_12_16T16_30_42.646847", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T16-30-42.646847.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T16-30-42.646847.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_12_16T16_30_42.646847", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T16-30-42.646847.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T16-30-42.646847.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_12_16T16_30_42.646847", "path": ["**/details_harness|hendrycksTest-management|5_2023-12-16T16-30-42.646847.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-12-16T16-30-42.646847.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_12_16T16_30_42.646847", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-12-16T16-30-42.646847.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-12-16T16-30-42.646847.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_12_16T16_30_42.646847", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T16-30-42.646847.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T16-30-42.646847.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_12_16T16_30_42.646847", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T16-30-42.646847.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T16-30-42.646847.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_12_16T16_30_42.646847", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T16-30-42.646847.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T16-30-42.646847.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_12_16T16_30_42.646847", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T16-30-42.646847.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T16-30-42.646847.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_12_16T16_30_42.646847", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-12-16T16-30-42.646847.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-12-16T16-30-42.646847.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_12_16T16_30_42.646847", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-12-16T16-30-42.646847.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-12-16T16-30-42.646847.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_12_16T16_30_42.646847", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-12-16T16-30-42.646847.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-12-16T16-30-42.646847.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_12_16T16_30_42.646847", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T16-30-42.646847.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T16-30-42.646847.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_12_16T16_30_42.646847", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2023-12-16T16-30-42.646847.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-12-16T16-30-42.646847.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_12_16T16_30_42.646847", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T16-30-42.646847.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T16-30-42.646847.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_12_16T16_30_42.646847", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T16-30-42.646847.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T16-30-42.646847.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_12_16T16_30_42.646847", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-12-16T16-30-42.646847.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-12-16T16-30-42.646847.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_12_16T16_30_42.646847", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-12-16T16-30-42.646847.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-12-16T16-30-42.646847.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_12_16T16_30_42.646847", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-12-16T16-30-42.646847.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-12-16T16-30-42.646847.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_12_16T16_30_42.646847", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T16-30-42.646847.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T16-30-42.646847.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_12_16T16_30_42.646847", "path": ["**/details_harness|hendrycksTest-virology|5_2023-12-16T16-30-42.646847.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-12-16T16-30-42.646847.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_12_16T16_30_42.646847", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-12-16T16-30-42.646847.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-12-16T16-30-42.646847.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_12_16T16_30_42.646847", "path": ["**/details_harness|truthfulqa:mc|0_2023-12-16T16-30-42.646847.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-12-16T16-30-42.646847.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_12_16T16_30_42.646847", "path": ["**/details_harness|winogrande|5_2023-12-16T16-30-42.646847.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-12-16T16-30-42.646847.parquet"]}]}, {"config_name": "results", "data_files": [{"split": 
"2023_12_16T16_30_42.646847", "path": ["results_2023-12-16T16-30-42.646847.parquet"]}, {"split": "latest", "path": ["results_2023-12-16T16-30-42.646847.parquet"]}]}]}
2023-12-16T16:34:16+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of ignos/LeoScorpius-GreenNode-Alpaca-7B-v1 Dataset automatically created during the evaluation run of model ignos/LeoScorpius-GreenNode-Alpaca-7B-v1 on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-12-16T16:30:42.646847 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
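A minimal loading sketch for this details repository, using only names taken from its config metadata above (the `harness_gsm8k_5` and `results` configs, the `latest` split, and the repository id `open-llm-leaderboard/details_ignos__LeoScorpius-GreenNode-Alpaca-7B-v1`); the call pattern assumes standard usage of the `datasets` library:

```python
from datasets import load_dataset

# Each "harness_*" config holds the per-sample details for one evaluated task;
# the "latest" split always resolves to the most recent run
# (2023-12-16T16-30-42.646847 for this repository).
gsm8k_details = load_dataset(
    "open-llm-leaderboard/details_ignos__LeoScorpius-GreenNode-Alpaca-7B-v1",
    "harness_gsm8k_5",
    split="latest",
)

# The aggregated per-run metrics are stored under the "results" config.
run_results = load_dataset(
    "open-llm-leaderboard/details_ignos__LeoScorpius-GreenNode-Alpaca-7B-v1",
    "results",
    split="latest",
)
```

Swapping "latest" for the timestamped split name listed in the metadata (here "2023_12_16T16_30_42.646847") pins the load to that specific run.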
[ "# Dataset Card for Evaluation run of ignos/LeoScorpius-GreenNode-Alpaca-7B-v1\n\n\n\nDataset automatically created during the evaluation run of model ignos/LeoScorpius-GreenNode-Alpaca-7B-v1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-12-16T16:30:42.646847(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of ignos/LeoScorpius-GreenNode-Alpaca-7B-v1\n\n\n\nDataset automatically created during the evaluation run of model ignos/LeoScorpius-GreenNode-Alpaca-7B-v1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-12-16T16:30:42.646847(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 205, 68, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of ignos/LeoScorpius-GreenNode-Alpaca-7B-v1\n\n\n\nDataset automatically created during the evaluation run of model ignos/LeoScorpius-GreenNode-Alpaca-7B-v1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-12-16T16:30:42.646847(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]" ]
e22599226e35cb3ecec7dccf51f2c36812fce31f
# Dataset Card for Evaluation run of mncai/agiin-13.6B-v0.1

<!-- Provide a quick summary of the dataset. -->

Dataset automatically created during the evaluation run of model [mncai/agiin-13.6B-v0.1](https://huggingface.co/mncai/agiin-13.6B-v0.1) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).

The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).

To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_mncai__agiin-13.6B-v0.1",
	"harness_winogrande_5",
	split="train")
```

## Latest results

These are the [latest results from run 2023-12-16T16:35:40.891850](https://huggingface.co/datasets/open-llm-leaderboard/details_mncai__agiin-13.6B-v0.1/blob/main/results_2023-12-16T16-35-40.891850.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval):

```python
{ "all": { "acc": 0.6140808996502091, "acc_stderr": 0.03322600041693132, "acc_norm": 0.6172006340341523, "acc_norm_stderr": 0.033898195854611735, "mc1": 0.5214198286413708, "mc1_stderr": 0.01748743214471164, "mc2": 0.6797310501619931, "mc2_stderr": 0.015395432575157594 }, "harness|arc:challenge|25": { "acc": 0.6672354948805461, "acc_stderr": 0.013769863046192302, "acc_norm": 0.6945392491467577, "acc_norm_stderr": 0.013460080478002508 }, "harness|hellaswag|10": { "acc": 0.6861183031268672, "acc_stderr": 0.004631205099684944, "acc_norm": 0.8663612826130253, "acc_norm_stderr": 0.0033956833380563364 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.32, "acc_stderr": 0.046882617226215034, "acc_norm": 0.32, "acc_norm_stderr": 0.046882617226215034 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.5481481481481482, "acc_stderr": 0.04299268905480864, "acc_norm": 0.5481481481481482, "acc_norm_stderr": 0.04299268905480864 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.5986842105263158, "acc_stderr": 0.039889037033362836, "acc_norm": 0.5986842105263158, "acc_norm_stderr": 0.039889037033362836 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.58, "acc_stderr": 0.049604496374885836, "acc_norm": 0.58, "acc_norm_stderr": 0.049604496374885836 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.6339622641509434, "acc_stderr": 0.02964781353936525, "acc_norm": 0.6339622641509434, "acc_norm_stderr": 0.02964781353936525 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.6875, "acc_stderr": 0.038760854559127644, "acc_norm": 0.6875, "acc_norm_stderr": 0.038760854559127644 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.42, "acc_stderr": 0.049604496374885836, "acc_norm": 0.42, "acc_norm_stderr": 0.049604496374885836 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.5, "acc_stderr": 0.050251890762960605, "acc_norm": 0.5, "acc_norm_stderr": 0.050251890762960605 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.35, "acc_stderr":
0.047937248544110175, "acc_norm": 0.35, "acc_norm_stderr": 0.047937248544110175 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.6184971098265896, "acc_stderr": 0.037038511930995215, "acc_norm": 0.6184971098265896, "acc_norm_stderr": 0.037038511930995215 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.4019607843137255, "acc_stderr": 0.04878608714466996, "acc_norm": 0.4019607843137255, "acc_norm_stderr": 0.04878608714466996 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.65, "acc_stderr": 0.0479372485441102, "acc_norm": 0.65, "acc_norm_stderr": 0.0479372485441102 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.5617021276595745, "acc_stderr": 0.03243618636108102, "acc_norm": 0.5617021276595745, "acc_norm_stderr": 0.03243618636108102 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.39473684210526316, "acc_stderr": 0.045981880578165414, "acc_norm": 0.39473684210526316, "acc_norm_stderr": 0.045981880578165414 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.593103448275862, "acc_stderr": 0.04093793981266236, "acc_norm": 0.593103448275862, "acc_norm_stderr": 0.04093793981266236 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.42328042328042326, "acc_stderr": 0.02544636563440678, "acc_norm": 0.42328042328042326, "acc_norm_stderr": 0.02544636563440678 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.42857142857142855, "acc_stderr": 0.0442626668137991, "acc_norm": 0.42857142857142855, "acc_norm_stderr": 0.0442626668137991 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.37, "acc_stderr": 0.048523658709391, "acc_norm": 0.37, "acc_norm_stderr": 0.048523658709391 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.7258064516129032, "acc_stderr": 0.025378139970885203, "acc_norm": 0.7258064516129032, "acc_norm_stderr": 0.025378139970885203 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.4827586206896552, "acc_stderr": 0.035158955511656986, "acc_norm": 0.4827586206896552, "acc_norm_stderr": 0.035158955511656986 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.63, "acc_stderr": 0.04852365870939098, "acc_norm": 0.63, "acc_norm_stderr": 0.04852365870939098 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.7818181818181819, "acc_stderr": 0.03225078108306289, "acc_norm": 0.7818181818181819, "acc_norm_stderr": 0.03225078108306289 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.7727272727272727, "acc_stderr": 0.02985751567338642, "acc_norm": 0.7727272727272727, "acc_norm_stderr": 0.02985751567338642 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.8134715025906736, "acc_stderr": 0.028112091210117467, "acc_norm": 0.8134715025906736, "acc_norm_stderr": 0.028112091210117467 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.6564102564102564, "acc_stderr": 0.024078696580635474, "acc_norm": 0.6564102564102564, "acc_norm_stderr": 0.024078696580635474 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.34814814814814815, "acc_stderr": 0.029045600290616255, "acc_norm": 0.34814814814814815, "acc_norm_stderr": 0.029045600290616255 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.6302521008403361, "acc_stderr": 0.031357095996135904, "acc_norm": 0.6302521008403361, "acc_norm_stderr": 0.031357095996135904 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.33774834437086093, "acc_stderr": 0.03861557546255169, "acc_norm": 0.33774834437086093, "acc_norm_stderr": 
0.03861557546255169 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8220183486238533, "acc_stderr": 0.016399436366612896, "acc_norm": 0.8220183486238533, "acc_norm_stderr": 0.016399436366612896 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.5555555555555556, "acc_stderr": 0.03388857118502325, "acc_norm": 0.5555555555555556, "acc_norm_stderr": 0.03388857118502325 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.7990196078431373, "acc_stderr": 0.028125972265654366, "acc_norm": 0.7990196078431373, "acc_norm_stderr": 0.028125972265654366 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.7805907172995781, "acc_stderr": 0.026939106581553945, "acc_norm": 0.7805907172995781, "acc_norm_stderr": 0.026939106581553945 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.6771300448430493, "acc_stderr": 0.03138147637575499, "acc_norm": 0.6771300448430493, "acc_norm_stderr": 0.03138147637575499 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.7099236641221374, "acc_stderr": 0.03980066246467766, "acc_norm": 0.7099236641221374, "acc_norm_stderr": 0.03980066246467766 }, "harness|hendrycksTest-international_law|5": { "acc": 0.8181818181818182, "acc_stderr": 0.03520893951097654, "acc_norm": 0.8181818181818182, "acc_norm_stderr": 0.03520893951097654 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.7222222222222222, "acc_stderr": 0.04330043749650741, "acc_norm": 0.7222222222222222, "acc_norm_stderr": 0.04330043749650741 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.7361963190184049, "acc_stderr": 0.03462419931615624, "acc_norm": 0.7361963190184049, "acc_norm_stderr": 0.03462419931615624 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.4642857142857143, "acc_stderr": 0.04733667890053756, "acc_norm": 0.4642857142857143, "acc_norm_stderr": 0.04733667890053756 }, "harness|hendrycksTest-management|5": { "acc": 0.7475728155339806, "acc_stderr": 0.04301250399690878, "acc_norm": 0.7475728155339806, "acc_norm_stderr": 0.04301250399690878 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8461538461538461, "acc_stderr": 0.023636873317489267, "acc_norm": 0.8461538461538461, "acc_norm_stderr": 0.023636873317489267 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.62, "acc_stderr": 0.04878317312145632, "acc_norm": 0.62, "acc_norm_stderr": 0.04878317312145632 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.7573435504469987, "acc_stderr": 0.015329888940899867, "acc_norm": 0.7573435504469987, "acc_norm_stderr": 0.015329888940899867 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.6791907514450867, "acc_stderr": 0.025131000233647886, "acc_norm": 0.6791907514450867, "acc_norm_stderr": 0.025131000233647886 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.46033519553072627, "acc_stderr": 0.016669799592112025, "acc_norm": 0.46033519553072627, "acc_norm_stderr": 0.016669799592112025 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.6535947712418301, "acc_stderr": 0.027245613047215355, "acc_norm": 0.6535947712418301, "acc_norm_stderr": 0.027245613047215355 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.6784565916398714, "acc_stderr": 0.026527724079528872, "acc_norm": 0.6784565916398714, "acc_norm_stderr": 0.026527724079528872 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.6759259259259259, "acc_stderr": 0.026041766202717163, "acc_norm": 0.6759259259259259, "acc_norm_stderr": 0.026041766202717163 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.4219858156028369, "acc_stderr": 
0.029462189233370593, "acc_norm": 0.4219858156028369, "acc_norm_stderr": 0.029462189233370593 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.47327249022164275, "acc_stderr": 0.012751977967676008, "acc_norm": 0.47327249022164275, "acc_norm_stderr": 0.012751977967676008 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.6360294117647058, "acc_stderr": 0.02922719246003203, "acc_norm": 0.6360294117647058, "acc_norm_stderr": 0.02922719246003203 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.6486928104575164, "acc_stderr": 0.019312676065786558, "acc_norm": 0.6486928104575164, "acc_norm_stderr": 0.019312676065786558 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6727272727272727, "acc_stderr": 0.0449429086625209, "acc_norm": 0.6727272727272727, "acc_norm_stderr": 0.0449429086625209 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.6285714285714286, "acc_stderr": 0.030932858792789845, "acc_norm": 0.6285714285714286, "acc_norm_stderr": 0.030932858792789845 }, "harness|hendrycksTest-sociology|5": { "acc": 0.8258706467661692, "acc_stderr": 0.026814951200421603, "acc_norm": 0.8258706467661692, "acc_norm_stderr": 0.026814951200421603 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.8, "acc_stderr": 0.04020151261036844, "acc_norm": 0.8, "acc_norm_stderr": 0.04020151261036844 }, "harness|hendrycksTest-virology|5": { "acc": 0.5060240963855421, "acc_stderr": 0.03892212195333045, "acc_norm": 0.5060240963855421, "acc_norm_stderr": 0.03892212195333045 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.7777777777777778, "acc_stderr": 0.03188578017686398, "acc_norm": 0.7777777777777778, "acc_norm_stderr": 0.03188578017686398 }, "harness|truthfulqa:mc|0": { "mc1": 0.5214198286413708, "mc1_stderr": 0.01748743214471164, "mc2": 0.6797310501619931, "mc2_stderr": 0.015395432575157594 }, "harness|winogrande|5": { "acc": 0.7868981846882399, "acc_stderr": 0.011508957690722743 }, "harness|gsm8k|5": { "acc": 0.46474601971190294, "acc_stderr": 0.01373820799017732 } }
```

## Dataset Details

### Dataset Description

<!-- Provide a longer summary of what this dataset is. -->

- **Curated by:** [More Information Needed]
- **Funded by [optional]:** [More Information Needed]
- **Shared by [optional]:** [More Information Needed]
- **Language(s) (NLP):** [More Information Needed]
- **License:** [More Information Needed]

### Dataset Sources [optional]

<!-- Provide the basic links for the dataset. -->

- **Repository:** [More Information Needed]
- **Paper [optional]:** [More Information Needed]
- **Demo [optional]:** [More Information Needed]

## Uses

<!-- Address questions around how the dataset is intended to be used. -->

### Direct Use

<!-- This section describes suitable use cases for the dataset. -->

[More Information Needed]

### Out-of-Scope Use

<!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. -->

[More Information Needed]

## Dataset Structure

<!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. -->

[More Information Needed]

## Dataset Creation

### Curation Rationale

<!-- Motivation for the creation of this dataset. -->

[More Information Needed]

### Source Data

<!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). -->

#### Data Collection and Processing

<!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. -->

[More Information Needed]

#### Who are the source data producers?

<!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. -->

[More Information Needed]

### Annotations [optional]

<!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. -->

#### Annotation process

<!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. -->

[More Information Needed]

#### Who are the annotators?

<!-- This section describes the people or systems who created the annotations. -->

[More Information Needed]

#### Personal and Sensitive Information

<!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. -->

[More Information Needed]

## Bias, Risks, and Limitations

<!-- This section is meant to convey both technical and sociotechnical limitations. -->

[More Information Needed]

### Recommendations

<!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. -->

Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.

## Citation [optional]

<!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. -->

**BibTeX:**

[More Information Needed]

**APA:**

[More Information Needed]

## Glossary [optional]

<!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. -->

[More Information Needed]

## More Information [optional]

[More Information Needed]

## Dataset Card Authors [optional]

[More Information Needed]

## Dataset Card Contact

[More Information Needed]
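As a complement to the loading snippet above, the sketch below shows one way to pull the aggregated "results" configuration described in this card and inspect its latest entry. This is a minimal sketch, not part of the generated card: it assumes the `datasets` library is installed and the Hub repository is reachable, and since the column layout of the results table is not documented here, it only prints whatever fields it finds.

```python
from datasets import load_dataset

# The "results" configuration aggregates the run-level metrics; the "latest"
# split always points at the most recent evaluation run of this model.
results = load_dataset(
    "open-llm-leaderboard/details_mncai__agiin-13.6B-v0.1",
    "results",
    split="latest",
)

# Print the available columns and the content of each record without
# assuming a particular schema (assumption: the schema may change per run).
print(results.column_names)
for record in results:
    for key, value in record.items():
        print(f"{key}: {value}")
```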
open-llm-leaderboard/details_mncai__agiin-13.6B-v0.1
[ "region:us" ]
2023-12-16T16:38:35+00:00
{"pretty_name": "Evaluation run of mncai/agiin-13.6B-v0.1", "dataset_summary": "Dataset automatically created during the evaluation run of model [mncai/agiin-13.6B-v0.1](https://huggingface.co/mncai/agiin-13.6B-v0.1) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_mncai__agiin-13.6B-v0.1\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-12-16T16:35:40.891850](https://huggingface.co/datasets/open-llm-leaderboard/details_mncai__agiin-13.6B-v0.1/blob/main/results_2023-12-16T16-35-40.891850.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6140808996502091,\n \"acc_stderr\": 0.03322600041693132,\n \"acc_norm\": 0.6172006340341523,\n \"acc_norm_stderr\": 0.033898195854611735,\n \"mc1\": 0.5214198286413708,\n \"mc1_stderr\": 0.01748743214471164,\n \"mc2\": 0.6797310501619931,\n \"mc2_stderr\": 0.015395432575157594\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.6672354948805461,\n \"acc_stderr\": 0.013769863046192302,\n \"acc_norm\": 0.6945392491467577,\n \"acc_norm_stderr\": 0.013460080478002508\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6861183031268672,\n \"acc_stderr\": 0.004631205099684944,\n \"acc_norm\": 0.8663612826130253,\n \"acc_norm_stderr\": 0.0033956833380563364\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.32,\n \"acc_stderr\": 0.046882617226215034,\n \"acc_norm\": 0.32,\n \"acc_norm_stderr\": 0.046882617226215034\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.5481481481481482,\n \"acc_stderr\": 0.04299268905480864,\n \"acc_norm\": 0.5481481481481482,\n \"acc_norm_stderr\": 0.04299268905480864\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.5986842105263158,\n \"acc_stderr\": 0.039889037033362836,\n \"acc_norm\": 0.5986842105263158,\n \"acc_norm_stderr\": 0.039889037033362836\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.58,\n \"acc_stderr\": 0.049604496374885836,\n \"acc_norm\": 0.58,\n \"acc_norm_stderr\": 0.049604496374885836\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.6339622641509434,\n \"acc_stderr\": 0.02964781353936525,\n \"acc_norm\": 0.6339622641509434,\n \"acc_norm_stderr\": 0.02964781353936525\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.6875,\n \"acc_stderr\": 0.038760854559127644,\n \"acc_norm\": 0.6875,\n \"acc_norm_stderr\": 0.038760854559127644\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.42,\n \"acc_stderr\": 0.049604496374885836,\n \"acc_norm\": 0.42,\n \"acc_norm_stderr\": 
0.049604496374885836\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.5,\n \"acc_stderr\": 0.050251890762960605,\n \"acc_norm\": 0.5,\n \"acc_norm_stderr\": 0.050251890762960605\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.35,\n \"acc_stderr\": 0.047937248544110175,\n \"acc_norm\": 0.35,\n \"acc_norm_stderr\": 0.047937248544110175\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6184971098265896,\n \"acc_stderr\": 0.037038511930995215,\n \"acc_norm\": 0.6184971098265896,\n \"acc_norm_stderr\": 0.037038511930995215\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.4019607843137255,\n \"acc_stderr\": 0.04878608714466996,\n \"acc_norm\": 0.4019607843137255,\n \"acc_norm_stderr\": 0.04878608714466996\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.65,\n \"acc_stderr\": 0.0479372485441102,\n \"acc_norm\": 0.65,\n \"acc_norm_stderr\": 0.0479372485441102\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.5617021276595745,\n \"acc_stderr\": 0.03243618636108102,\n \"acc_norm\": 0.5617021276595745,\n \"acc_norm_stderr\": 0.03243618636108102\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.39473684210526316,\n \"acc_stderr\": 0.045981880578165414,\n \"acc_norm\": 0.39473684210526316,\n \"acc_norm_stderr\": 0.045981880578165414\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.593103448275862,\n \"acc_stderr\": 0.04093793981266236,\n \"acc_norm\": 0.593103448275862,\n \"acc_norm_stderr\": 0.04093793981266236\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.42328042328042326,\n \"acc_stderr\": 0.02544636563440678,\n \"acc_norm\": 0.42328042328042326,\n \"acc_norm_stderr\": 0.02544636563440678\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.42857142857142855,\n \"acc_stderr\": 0.0442626668137991,\n \"acc_norm\": 0.42857142857142855,\n \"acc_norm_stderr\": 0.0442626668137991\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.37,\n \"acc_stderr\": 0.048523658709391,\n \"acc_norm\": 0.37,\n \"acc_norm_stderr\": 0.048523658709391\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7258064516129032,\n \"acc_stderr\": 0.025378139970885203,\n \"acc_norm\": 0.7258064516129032,\n \"acc_norm_stderr\": 0.025378139970885203\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.4827586206896552,\n \"acc_stderr\": 0.035158955511656986,\n \"acc_norm\": 0.4827586206896552,\n \"acc_norm_stderr\": 0.035158955511656986\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.63,\n \"acc_stderr\": 0.04852365870939098,\n \"acc_norm\": 0.63,\n \"acc_norm_stderr\": 0.04852365870939098\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7818181818181819,\n \"acc_stderr\": 0.03225078108306289,\n \"acc_norm\": 0.7818181818181819,\n \"acc_norm_stderr\": 0.03225078108306289\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.7727272727272727,\n \"acc_stderr\": 0.02985751567338642,\n \"acc_norm\": 0.7727272727272727,\n \"acc_norm_stderr\": 0.02985751567338642\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.8134715025906736,\n \"acc_stderr\": 0.028112091210117467,\n \"acc_norm\": 0.8134715025906736,\n \"acc_norm_stderr\": 0.028112091210117467\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.6564102564102564,\n \"acc_stderr\": 
0.024078696580635474,\n \"acc_norm\": 0.6564102564102564,\n \"acc_norm_stderr\": 0.024078696580635474\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.34814814814814815,\n \"acc_stderr\": 0.029045600290616255,\n \"acc_norm\": 0.34814814814814815,\n \"acc_norm_stderr\": 0.029045600290616255\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.6302521008403361,\n \"acc_stderr\": 0.031357095996135904,\n \"acc_norm\": 0.6302521008403361,\n \"acc_norm_stderr\": 0.031357095996135904\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.33774834437086093,\n \"acc_stderr\": 0.03861557546255169,\n \"acc_norm\": 0.33774834437086093,\n \"acc_norm_stderr\": 0.03861557546255169\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8220183486238533,\n \"acc_stderr\": 0.016399436366612896,\n \"acc_norm\": 0.8220183486238533,\n \"acc_norm_stderr\": 0.016399436366612896\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.5555555555555556,\n \"acc_stderr\": 0.03388857118502325,\n \"acc_norm\": 0.5555555555555556,\n \"acc_norm_stderr\": 0.03388857118502325\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.7990196078431373,\n \"acc_stderr\": 0.028125972265654366,\n \"acc_norm\": 0.7990196078431373,\n \"acc_norm_stderr\": 0.028125972265654366\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.7805907172995781,\n \"acc_stderr\": 0.026939106581553945,\n \"acc_norm\": 0.7805907172995781,\n \"acc_norm_stderr\": 0.026939106581553945\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6771300448430493,\n \"acc_stderr\": 0.03138147637575499,\n \"acc_norm\": 0.6771300448430493,\n \"acc_norm_stderr\": 0.03138147637575499\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.7099236641221374,\n \"acc_stderr\": 0.03980066246467766,\n \"acc_norm\": 0.7099236641221374,\n \"acc_norm_stderr\": 0.03980066246467766\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.8181818181818182,\n \"acc_stderr\": 0.03520893951097654,\n \"acc_norm\": 0.8181818181818182,\n \"acc_norm_stderr\": 0.03520893951097654\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7222222222222222,\n \"acc_stderr\": 0.04330043749650741,\n \"acc_norm\": 0.7222222222222222,\n \"acc_norm_stderr\": 0.04330043749650741\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7361963190184049,\n \"acc_stderr\": 0.03462419931615624,\n \"acc_norm\": 0.7361963190184049,\n \"acc_norm_stderr\": 0.03462419931615624\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.4642857142857143,\n \"acc_stderr\": 0.04733667890053756,\n \"acc_norm\": 0.4642857142857143,\n \"acc_norm_stderr\": 0.04733667890053756\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7475728155339806,\n \"acc_stderr\": 0.04301250399690878,\n \"acc_norm\": 0.7475728155339806,\n \"acc_norm_stderr\": 0.04301250399690878\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8461538461538461,\n \"acc_stderr\": 0.023636873317489267,\n \"acc_norm\": 0.8461538461538461,\n \"acc_norm_stderr\": 0.023636873317489267\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.62,\n \"acc_stderr\": 0.04878317312145632,\n \"acc_norm\": 0.62,\n \"acc_norm_stderr\": 0.04878317312145632\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.7573435504469987,\n \"acc_stderr\": 0.015329888940899867,\n \"acc_norm\": 0.7573435504469987,\n 
\"acc_norm_stderr\": 0.015329888940899867\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.6791907514450867,\n \"acc_stderr\": 0.025131000233647886,\n \"acc_norm\": 0.6791907514450867,\n \"acc_norm_stderr\": 0.025131000233647886\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.46033519553072627,\n \"acc_stderr\": 0.016669799592112025,\n \"acc_norm\": 0.46033519553072627,\n \"acc_norm_stderr\": 0.016669799592112025\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.6535947712418301,\n \"acc_stderr\": 0.027245613047215355,\n \"acc_norm\": 0.6535947712418301,\n \"acc_norm_stderr\": 0.027245613047215355\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.6784565916398714,\n \"acc_stderr\": 0.026527724079528872,\n \"acc_norm\": 0.6784565916398714,\n \"acc_norm_stderr\": 0.026527724079528872\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.6759259259259259,\n \"acc_stderr\": 0.026041766202717163,\n \"acc_norm\": 0.6759259259259259,\n \"acc_norm_stderr\": 0.026041766202717163\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.4219858156028369,\n \"acc_stderr\": 0.029462189233370593,\n \"acc_norm\": 0.4219858156028369,\n \"acc_norm_stderr\": 0.029462189233370593\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.47327249022164275,\n \"acc_stderr\": 0.012751977967676008,\n \"acc_norm\": 0.47327249022164275,\n \"acc_norm_stderr\": 0.012751977967676008\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.6360294117647058,\n \"acc_stderr\": 0.02922719246003203,\n \"acc_norm\": 0.6360294117647058,\n \"acc_norm_stderr\": 0.02922719246003203\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6486928104575164,\n \"acc_stderr\": 0.019312676065786558,\n \"acc_norm\": 0.6486928104575164,\n \"acc_norm_stderr\": 0.019312676065786558\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6727272727272727,\n \"acc_stderr\": 0.0449429086625209,\n \"acc_norm\": 0.6727272727272727,\n \"acc_norm_stderr\": 0.0449429086625209\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.6285714285714286,\n \"acc_stderr\": 0.030932858792789845,\n \"acc_norm\": 0.6285714285714286,\n \"acc_norm_stderr\": 0.030932858792789845\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8258706467661692,\n \"acc_stderr\": 0.026814951200421603,\n \"acc_norm\": 0.8258706467661692,\n \"acc_norm_stderr\": 0.026814951200421603\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.8,\n \"acc_stderr\": 0.04020151261036844,\n \"acc_norm\": 0.8,\n \"acc_norm_stderr\": 0.04020151261036844\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5060240963855421,\n \"acc_stderr\": 0.03892212195333045,\n \"acc_norm\": 0.5060240963855421,\n \"acc_norm_stderr\": 0.03892212195333045\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.7777777777777778,\n \"acc_stderr\": 0.03188578017686398,\n \"acc_norm\": 0.7777777777777778,\n \"acc_norm_stderr\": 0.03188578017686398\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.5214198286413708,\n \"mc1_stderr\": 0.01748743214471164,\n \"mc2\": 0.6797310501619931,\n \"mc2_stderr\": 0.015395432575157594\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7868981846882399,\n \"acc_stderr\": 0.011508957690722743\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.46474601971190294,\n \"acc_stderr\": 0.01373820799017732\n }\n}\n```", "repo_url": "https://huggingface.co/mncai/agiin-13.6B-v0.1", "leaderboard_url": 
"https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_12_16T16_35_40.891850", "path": ["**/details_harness|arc:challenge|25_2023-12-16T16-35-40.891850.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-12-16T16-35-40.891850.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_12_16T16_35_40.891850", "path": ["**/details_harness|gsm8k|5_2023-12-16T16-35-40.891850.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-12-16T16-35-40.891850.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_12_16T16_35_40.891850", "path": ["**/details_harness|hellaswag|10_2023-12-16T16-35-40.891850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-12-16T16-35-40.891850.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_12_16T16_35_40.891850", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T16-35-40.891850.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-12-16T16-35-40.891850.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-12-16T16-35-40.891850.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T16-35-40.891850.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T16-35-40.891850.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-12-16T16-35-40.891850.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T16-35-40.891850.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T16-35-40.891850.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T16-35-40.891850.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T16-35-40.891850.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-12-16T16-35-40.891850.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-12-16T16-35-40.891850.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T16-35-40.891850.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-12-16T16-35-40.891850.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T16-35-40.891850.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T16-35-40.891850.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T16-35-40.891850.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-12-16T16-35-40.891850.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T16-35-40.891850.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T16-35-40.891850.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T16-35-40.891850.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T16-35-40.891850.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T16-35-40.891850.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T16-35-40.891850.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T16-35-40.891850.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T16-35-40.891850.parquet", 
"**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T16-35-40.891850.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T16-35-40.891850.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T16-35-40.891850.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T16-35-40.891850.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T16-35-40.891850.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T16-35-40.891850.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-12-16T16-35-40.891850.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T16-35-40.891850.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-12-16T16-35-40.891850.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T16-35-40.891850.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T16-35-40.891850.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T16-35-40.891850.parquet", "**/details_harness|hendrycksTest-management|5_2023-12-16T16-35-40.891850.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-12-16T16-35-40.891850.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T16-35-40.891850.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T16-35-40.891850.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T16-35-40.891850.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T16-35-40.891850.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-12-16T16-35-40.891850.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-12-16T16-35-40.891850.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-12-16T16-35-40.891850.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T16-35-40.891850.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-12-16T16-35-40.891850.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T16-35-40.891850.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T16-35-40.891850.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-12-16T16-35-40.891850.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-12-16T16-35-40.891850.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-12-16T16-35-40.891850.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T16-35-40.891850.parquet", "**/details_harness|hendrycksTest-virology|5_2023-12-16T16-35-40.891850.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-12-16T16-35-40.891850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T16-35-40.891850.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-12-16T16-35-40.891850.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-12-16T16-35-40.891850.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T16-35-40.891850.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T16-35-40.891850.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-12-16T16-35-40.891850.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T16-35-40.891850.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T16-35-40.891850.parquet", 
"**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T16-35-40.891850.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T16-35-40.891850.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-12-16T16-35-40.891850.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-12-16T16-35-40.891850.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T16-35-40.891850.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-12-16T16-35-40.891850.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T16-35-40.891850.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T16-35-40.891850.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T16-35-40.891850.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-12-16T16-35-40.891850.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T16-35-40.891850.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T16-35-40.891850.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T16-35-40.891850.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T16-35-40.891850.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T16-35-40.891850.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T16-35-40.891850.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T16-35-40.891850.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T16-35-40.891850.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T16-35-40.891850.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T16-35-40.891850.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T16-35-40.891850.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T16-35-40.891850.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T16-35-40.891850.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T16-35-40.891850.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-12-16T16-35-40.891850.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T16-35-40.891850.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-12-16T16-35-40.891850.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T16-35-40.891850.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T16-35-40.891850.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T16-35-40.891850.parquet", "**/details_harness|hendrycksTest-management|5_2023-12-16T16-35-40.891850.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-12-16T16-35-40.891850.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T16-35-40.891850.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T16-35-40.891850.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T16-35-40.891850.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T16-35-40.891850.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-12-16T16-35-40.891850.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-12-16T16-35-40.891850.parquet", 
"**/details_harness|hendrycksTest-prehistory|5_2023-12-16T16-35-40.891850.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T16-35-40.891850.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-12-16T16-35-40.891850.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T16-35-40.891850.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T16-35-40.891850.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-12-16T16-35-40.891850.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-12-16T16-35-40.891850.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-12-16T16-35-40.891850.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T16-35-40.891850.parquet", "**/details_harness|hendrycksTest-virology|5_2023-12-16T16-35-40.891850.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-12-16T16-35-40.891850.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_12_16T16_35_40.891850", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T16-35-40.891850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T16-35-40.891850.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_12_16T16_35_40.891850", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-12-16T16-35-40.891850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-12-16T16-35-40.891850.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_12_16T16_35_40.891850", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-12-16T16-35-40.891850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-12-16T16-35-40.891850.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_12_16T16_35_40.891850", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T16-35-40.891850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T16-35-40.891850.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_12_16T16_35_40.891850", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T16-35-40.891850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T16-35-40.891850.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_12_16T16_35_40.891850", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-12-16T16-35-40.891850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-12-16T16-35-40.891850.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_12_16T16_35_40.891850", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T16-35-40.891850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T16-35-40.891850.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_12_16T16_35_40.891850", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T16-35-40.891850.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T16-35-40.891850.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_12_16T16_35_40.891850", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T16-35-40.891850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T16-35-40.891850.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_12_16T16_35_40.891850", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T16-35-40.891850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T16-35-40.891850.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_12_16T16_35_40.891850", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-12-16T16-35-40.891850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-12-16T16-35-40.891850.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_12_16T16_35_40.891850", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-12-16T16-35-40.891850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-12-16T16-35-40.891850.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_12_16T16_35_40.891850", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T16-35-40.891850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T16-35-40.891850.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_12_16T16_35_40.891850", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-12-16T16-35-40.891850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-12-16T16-35-40.891850.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_12_16T16_35_40.891850", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T16-35-40.891850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T16-35-40.891850.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_12_16T16_35_40.891850", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T16-35-40.891850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T16-35-40.891850.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_12_16T16_35_40.891850", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T16-35-40.891850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T16-35-40.891850.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_12_16T16_35_40.891850", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-12-16T16-35-40.891850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-12-16T16-35-40.891850.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_12_16T16_35_40.891850", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T16-35-40.891850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T16-35-40.891850.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_12_16T16_35_40.891850", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T16-35-40.891850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T16-35-40.891850.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_12_16T16_35_40.891850", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T16-35-40.891850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T16-35-40.891850.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_12_16T16_35_40.891850", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T16-35-40.891850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T16-35-40.891850.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_12_16T16_35_40.891850", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T16-35-40.891850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T16-35-40.891850.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_12_16T16_35_40.891850", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T16-35-40.891850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T16-35-40.891850.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_12_16T16_35_40.891850", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T16-35-40.891850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T16-35-40.891850.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_12_16T16_35_40.891850", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T16-35-40.891850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T16-35-40.891850.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_12_16T16_35_40.891850", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T16-35-40.891850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T16-35-40.891850.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_12_16T16_35_40.891850", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T16-35-40.891850.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T16-35-40.891850.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_12_16T16_35_40.891850", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T16-35-40.891850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T16-35-40.891850.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_12_16T16_35_40.891850", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T16-35-40.891850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T16-35-40.891850.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_12_16T16_35_40.891850", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T16-35-40.891850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T16-35-40.891850.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_12_16T16_35_40.891850", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T16-35-40.891850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T16-35-40.891850.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_12_16T16_35_40.891850", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-12-16T16-35-40.891850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-12-16T16-35-40.891850.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_12_16T16_35_40.891850", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T16-35-40.891850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T16-35-40.891850.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_12_16T16_35_40.891850", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-12-16T16-35-40.891850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-12-16T16-35-40.891850.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_12_16T16_35_40.891850", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T16-35-40.891850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T16-35-40.891850.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_12_16T16_35_40.891850", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T16-35-40.891850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T16-35-40.891850.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_12_16T16_35_40.891850", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T16-35-40.891850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T16-35-40.891850.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_12_16T16_35_40.891850", "path": ["**/details_harness|hendrycksTest-management|5_2023-12-16T16-35-40.891850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-12-16T16-35-40.891850.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_12_16T16_35_40.891850", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-12-16T16-35-40.891850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-12-16T16-35-40.891850.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_12_16T16_35_40.891850", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T16-35-40.891850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T16-35-40.891850.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_12_16T16_35_40.891850", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T16-35-40.891850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T16-35-40.891850.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_12_16T16_35_40.891850", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T16-35-40.891850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T16-35-40.891850.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_12_16T16_35_40.891850", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T16-35-40.891850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T16-35-40.891850.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_12_16T16_35_40.891850", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-12-16T16-35-40.891850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-12-16T16-35-40.891850.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_12_16T16_35_40.891850", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-12-16T16-35-40.891850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-12-16T16-35-40.891850.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_12_16T16_35_40.891850", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-12-16T16-35-40.891850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-12-16T16-35-40.891850.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_12_16T16_35_40.891850", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T16-35-40.891850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T16-35-40.891850.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_12_16T16_35_40.891850", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-12-16T16-35-40.891850.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2023-12-16T16-35-40.891850.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_12_16T16_35_40.891850", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T16-35-40.891850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T16-35-40.891850.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_12_16T16_35_40.891850", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T16-35-40.891850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T16-35-40.891850.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_12_16T16_35_40.891850", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-12-16T16-35-40.891850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-12-16T16-35-40.891850.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_12_16T16_35_40.891850", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-12-16T16-35-40.891850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-12-16T16-35-40.891850.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_12_16T16_35_40.891850", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-12-16T16-35-40.891850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-12-16T16-35-40.891850.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_12_16T16_35_40.891850", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T16-35-40.891850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T16-35-40.891850.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_12_16T16_35_40.891850", "path": ["**/details_harness|hendrycksTest-virology|5_2023-12-16T16-35-40.891850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-12-16T16-35-40.891850.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_12_16T16_35_40.891850", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-12-16T16-35-40.891850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-12-16T16-35-40.891850.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_12_16T16_35_40.891850", "path": ["**/details_harness|truthfulqa:mc|0_2023-12-16T16-35-40.891850.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-12-16T16-35-40.891850.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_12_16T16_35_40.891850", "path": ["**/details_harness|winogrande|5_2023-12-16T16-35-40.891850.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-12-16T16-35-40.891850.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_12_16T16_35_40.891850", "path": ["results_2023-12-16T16-35-40.891850.parquet"]}, {"split": "latest", "path": 
["results_2023-12-16T16-35-40.891850.parquet"]}]}]}
2023-12-16T16:39:19+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of mncai/agiin-13.6B-v0.1 Dataset automatically created during the evaluation run of model mncai/agiin-13.6B-v0.1 on the Open LLM Leaderboard. The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-12-16T16:35:40.891850(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
[ "# Dataset Card for Evaluation run of mncai/agiin-13.6B-v0.1\n\n\n\nDataset automatically created during the evaluation run of model mncai/agiin-13.6B-v0.1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-12-16T16:35:40.891850(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of mncai/agiin-13.6B-v0.1\n\n\n\nDataset automatically created during the evaluation run of model mncai/agiin-13.6B-v0.1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-12-16T16:35:40.891850(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 185, 67, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of mncai/agiin-13.6B-v0.1\n\n\n\nDataset automatically created during the evaluation run of model mncai/agiin-13.6B-v0.1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-12-16T16:35:40.891850(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]## Dataset Card Authors [optional]## Dataset Card Contact" ]
ef23a15e6fce911e36c1c216c594818aa1de4b18
# Dataset of akari (Blue Archive)

This is the dataset of akari (Blue Archive), containing 57 images and their tags.

Images are crawled from many sites (e.g. danbooru, pixiv, zerochan ...); the auto-crawling system is powered by [DeepGHS Team](https://github.com/deepghs) ([huggingface organization](https://huggingface.co/deepghs)). This is a WebUI that contains crawlers and other things: ([LittleAppleWebUI](https://github.com/LittleApple-fp16/LittleAppleWebUI))

| Name            | Images | Download                                 | Description                                                                               |
|:----------------|-------:|:-----------------------------------------|:------------------------------------------------------------------------------------------|
| raw             | 57     | [Download](dataset-raw.zip)              | Raw data with meta information.                                                           |
| raw-stage3      | 160    | [Download](dataset-raw-stage3.zip)       | 3-stage cropped raw data with meta information.                                           |
| raw-stage3-eyes | 186    | [Download](dataset-raw-stage3-eyes.zip)  | 3-stage cropped (with eye-focus) raw data with meta information.                          |
| 384x512         | 57     | [Download](dataset-384x512.zip)          | 384x512 aligned dataset.                                                                  |
| 512x704         | 57     | [Download](dataset-512x704.zip)          | 512x704 aligned dataset.                                                                  |
| 640x880         | 57     | [Download](dataset-640x880.zip)          | 640x880 aligned dataset.                                                                  |
| stage3-640      | 160    | [Download](dataset-stage3-640.zip)       | 3-stage cropped dataset with the shorter side not exceeding 640 pixels.                   |
| stage3-800      | 160    | [Download](dataset-stage3-800.zip)       | 3-stage cropped dataset with the shorter side not exceeding 800 pixels.                   |
| stage3-p512-640 | 154    | [Download](dataset-stage3-p512-640.zip)  | 3-stage cropped dataset with the area not less than 512x512 pixels.                       |
| stage3-eyes-640 | 186    | [Download](dataset-stage3-eyes-640.zip)  | 3-stage cropped (with eye-focus) dataset with the shorter side not exceeding 640 pixels.  |
| stage3-eyes-800 | 186    | [Download](dataset-stage3-eyes-800.zip)  | 3-stage cropped (with eye-focus) dataset with the shorter side not exceeding 800 pixels.  |
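As a minimal sketch of how one of these packages could be fetched and unpacked, assuming the `huggingface_hub` client and the standard-library `zipfile` module are available (the repository id and file name are the ones listed above; the output folder name is only illustrative):

```python
from zipfile import ZipFile

from huggingface_hub import hf_hub_download

# Fetch one of the packaged archives from the dataset repository.
archive = hf_hub_download(
    repo_id="AppleHarem/akari_bluearchive",
    filename="dataset-384x512.zip",
    repo_type="dataset",
)

# Unpack the aligned images and their tag files into a local folder.
with ZipFile(archive) as zf:
    zf.extractall("akari_bluearchive_384x512")
```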
AppleHarem/akari_bluearchive
[ "task_categories:text-to-image", "size_categories:n<1K", "license:mit", "art", "not-for-all-audiences", "region:us" ]
2023-12-16T16:46:19+00:00
{"license": "mit", "size_categories": ["n<1K"], "task_categories": ["text-to-image"], "tags": ["art", "not-for-all-audiences"]}
2023-12-16T16:46:31+00:00
[]
[]
TAGS #task_categories-text-to-image #size_categories-n<1K #license-mit #art #not-for-all-audiences #region-us
Dataset of akari (Blue Archive) =============================== This is the dataset of akari (Blue Archive), containing 57 images and their tags. Images are crawled from many sites (e.g. danbooru, pixiv, zerochan ...), the auto-crawling system is powered by DeepGHS Team(huggingface organization). This is a WebUI contains crawlers and other thing: (LittleAppleWebUI)
[]
[ "TAGS\n#task_categories-text-to-image #size_categories-n<1K #license-mit #art #not-for-all-audiences #region-us \n" ]
[ 44 ]
[ "passage: TAGS\n#task_categories-text-to-image #size_categories-n<1K #license-mit #art #not-for-all-audiences #region-us \n" ]
fc69e4fcc504d13393a769cad2ab6bd1e5cc473d
# SRTM 1 Arc-Second Global

GeoTIFF heightmaps of the Earth's surface labelled according to latitude and longitude.

## Mission Description

The Shuttle Radar Topography Mission (SRTM) was flown aboard the space shuttle Endeavour February 11-22, 2000. The National Aeronautics and Space Administration (NASA) and the National Geospatial-Intelligence Agency (NGA) participated in an international project to acquire radar data which were used to create the first near-global set of land elevations.

The radars used during the SRTM mission were actually developed and flown on two Endeavour missions in 1994. The C-band Spaceborne Imaging Radar and the X-Band Synthetic Aperture Radar (X-SAR) hardware were used on board the space shuttle in April and October 1994 to gather data about Earth's environment. The technology was modified for the SRTM mission to collect interferometric radar, which compared two radar images or signals taken at slightly different angles. This mission used single-pass interferometry, which acquired two signals at the same time by using two different radar antennas. An antenna located on board the space shuttle collected one data set and the other data set was collected by an antenna located at the end of a 60-meter mast that extended from the shuttle. Differences between the two signals allowed for the calculation of surface elevation.

Endeavour orbited Earth 16 times each day during the 11-day mission, completing 176 orbits. SRTM successfully collected radar data over 80% of the Earth's land surface between 60° north and 56° south latitude with data points posted every 1 arc-second (approximately 30 meters).

## Original Dataset

The original dataset as well as the [SRTM Non-Void Filled](https://doi.org/10.5066/F7K072R7) and [SRTM Void Filled](https://doi.org/10.5066/F7F76B1X) variants can be accessed on [EarthExplorer](https://earthexplorer.usgs.gov/).

## Digital Object Identifier (DOI)

[Shuttle Radar Topography Mission 1 Arc-Second Global (Digital Object Identifier (DOI) number: 10.5066/F7PR7TFT)](https://doi.org/10.5066/F7PR7TFT)
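As a rough sketch of how a heightmap tile could be read, assuming the tiles are standard single-band GeoTIFFs and that the `rasterio` library is used (the card does not prescribe a tool, and the file name below is only a placeholder for one of the latitude/longitude-labelled tiles):

```python
import rasterio

# Placeholder path: tiles are named after the latitude/longitude of their corner.
tile_path = "n37_w120_1arc_v3.tif"

with rasterio.open(tile_path) as src:
    elevation = src.read(1)    # 2D array of surface elevations
    transform = src.transform  # affine transform mapping row/col indices to coordinates

print(elevation.shape, elevation.min(), elevation.max())
```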
novaia/srtm-1-arc-second-global
[ "task_categories:image-classification", "task_categories:unconditional-image-generation", "size_categories:10K<n<100K", "region:us" ]
2023-12-16T16:51:27+00:00
{"size_categories": ["10K<n<100K"], "task_categories": ["image-classification", "unconditional-image-generation"]}
2024-01-15T20:48:06+00:00
[]
[]
TAGS #task_categories-image-classification #task_categories-unconditional-image-generation #size_categories-10K<n<100K #region-us
# SRTM 1 Arc-Second Global GeoTIFF heightmaps of the Earth's surface labelled according to latitude and longitude. ## Mission Description The Shuttle Radar Topography Mission (SRTM) was flown aboard the space shuttle Endeavour February 11-22, 2000. The National Aeronautics and Space Administration (NASA) and the National Geospatial-Intelligence Agency (NGA) participated in an international project to acquire radar data which were used to create the first near-global set of land elevations. The radars used during the SRTM mission were actually developed and flown on two Endeavour missions in 1994. The C-band Spaceborne Imaging Radar and the X-Band Synthetic Aperture Radar (X-SAR) hardware were used on board the space shuttle in April and October 1994 to gather data about Earth's environment. The technology was modified for the SRTM mission to collect interferometric radar, which compared two radar images or signals taken at slightly different angles. This mission used single-pass interferometry, which acquired two signals at the same time by using two different radar antennas. An antenna located on board the space shuttle collected one data set and the other data set was collected by an antenna located at the end of a 60-meter mast that extended from the shuttle. Differences between the two signals allowed for the calculation of surface elevation. Endeavour orbited Earth 16 times each day during the 11-day mission, completing 176 orbits. SRTM successfully collected radar data over 80% of the Earth's land surface between 60° north and 56° south latitude with data points posted every 1 arc-second (approximately 30 meters). ## Original Dataset The original dataset as well as the SRTM Non-Void Filled and SRTM Void Filled variants can be accessed on EarthExplorer. ## Digital Object Identifier (DOI) Shuttle Radar Topography Mission 1 Arc-Second Global (Digital Object Identifier (DOI) number: /10.5066/F7PR7TFT
[ "# SRTM 1 Arc-Second Global\nGeoTIFF heightmaps of the Earth's surface labelled according to latitude and longitude.", "## Mission Description\nThe Shuttle Radar Topography Mission (SRTM) was flown aboard the space shuttle Endeavour February 11-22, 2000. The National Aeronautics and Space Administration (NASA) and the National Geospatial-Intelligence Agency (NGA) participated in an international project to acquire radar data which were used to create the first near-global set of land elevations.\n\nThe radars used during the SRTM mission were actually developed and flown on two Endeavour missions in 1994. The C-band Spaceborne Imaging Radar and the X-Band Synthetic Aperture Radar (X-SAR) hardware were used on board the space shuttle in April and October 1994 to gather data about Earth's environment. The technology was modified for the SRTM mission to collect interferometric radar, which compared two radar images or signals taken at slightly different angles. This mission used single-pass interferometry, which acquired two signals at the same time by using two different radar antennas. An antenna located on board the space shuttle collected one data set and the other data set was collected by an antenna located at the end of a 60-meter mast that extended from the shuttle. Differences between the two signals allowed for the calculation of surface elevation.\n\nEndeavour orbited Earth 16 times each day during the 11-day mission, completing 176 orbits. SRTM successfully collected radar data over 80% of the Earth's land surface between 60° north and 56° south latitude with data points posted every 1 arc-second (approximately 30 meters).", "## Original Dataset\nThe original dataset as well as the SRTM Non-Void Filled and SRTM Void Filled variants can be accessed on EarthExplorer.", "## Digital Object Identifier (DOI)\nShuttle Radar Topography Mission 1 Arc-Second Global (Digital Object Identifier (DOI) number: /10.5066/F7PR7TFT" ]
[ "TAGS\n#task_categories-image-classification #task_categories-unconditional-image-generation #size_categories-10K<n<100K #region-us \n", "# SRTM 1 Arc-Second Global\nGeoTIFF heightmaps of the Earth's surface labelled according to latitude and longitude.", "## Mission Description\nThe Shuttle Radar Topography Mission (SRTM) was flown aboard the space shuttle Endeavour February 11-22, 2000. The National Aeronautics and Space Administration (NASA) and the National Geospatial-Intelligence Agency (NGA) participated in an international project to acquire radar data which were used to create the first near-global set of land elevations.\n\nThe radars used during the SRTM mission were actually developed and flown on two Endeavour missions in 1994. The C-band Spaceborne Imaging Radar and the X-Band Synthetic Aperture Radar (X-SAR) hardware were used on board the space shuttle in April and October 1994 to gather data about Earth's environment. The technology was modified for the SRTM mission to collect interferometric radar, which compared two radar images or signals taken at slightly different angles. This mission used single-pass interferometry, which acquired two signals at the same time by using two different radar antennas. An antenna located on board the space shuttle collected one data set and the other data set was collected by an antenna located at the end of a 60-meter mast that extended from the shuttle. Differences between the two signals allowed for the calculation of surface elevation.\n\nEndeavour orbited Earth 16 times each day during the 11-day mission, completing 176 orbits. SRTM successfully collected radar data over 80% of the Earth's land surface between 60° north and 56° south latitude with data points posted every 1 arc-second (approximately 30 meters).", "## Original Dataset\nThe original dataset as well as the SRTM Non-Void Filled and SRTM Void Filled variants can be accessed on EarthExplorer.", "## Digital Object Identifier (DOI)\nShuttle Radar Topography Mission 1 Arc-Second Global (Digital Object Identifier (DOI) number: /10.5066/F7PR7TFT" ]
[ 44, 32, 351, 39, 46 ]
[ "passage: TAGS\n#task_categories-image-classification #task_categories-unconditional-image-generation #size_categories-10K<n<100K #region-us \n# SRTM 1 Arc-Second Global\nGeoTIFF heightmaps of the Earth's surface labelled according to latitude and longitude.## Mission Description\nThe Shuttle Radar Topography Mission (SRTM) was flown aboard the space shuttle Endeavour February 11-22, 2000. The National Aeronautics and Space Administration (NASA) and the National Geospatial-Intelligence Agency (NGA) participated in an international project to acquire radar data which were used to create the first near-global set of land elevations.\n\nThe radars used during the SRTM mission were actually developed and flown on two Endeavour missions in 1994. The C-band Spaceborne Imaging Radar and the X-Band Synthetic Aperture Radar (X-SAR) hardware were used on board the space shuttle in April and October 1994 to gather data about Earth's environment. The technology was modified for the SRTM mission to collect interferometric radar, which compared two radar images or signals taken at slightly different angles. This mission used single-pass interferometry, which acquired two signals at the same time by using two different radar antennas. An antenna located on board the space shuttle collected one data set and the other data set was collected by an antenna located at the end of a 60-meter mast that extended from the shuttle. Differences between the two signals allowed for the calculation of surface elevation.\n\nEndeavour orbited Earth 16 times each day during the 11-day mission, completing 176 orbits. SRTM successfully collected radar data over 80% of the Earth's land surface between 60° north and 56° south latitude with data points posted every 1 arc-second (approximately 30 meters).## Original Dataset\nThe original dataset as well as the SRTM Non-Void Filled and SRTM Void Filled variants can be accessed on EarthExplorer." ]
66d688811a2e91ffc5de301f9eaab219ba069de1
# Dataset Card for Evaluation run of Dans-DiscountModels/ShearedLlama-1.3b-FFT-Test1 <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [Dans-DiscountModels/ShearedLlama-1.3b-FFT-Test1](https://huggingface.co/Dans-DiscountModels/ShearedLlama-1.3b-FFT-Test1) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_Dans-DiscountModels__ShearedLlama-1.3b-FFT-Test1", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-12-16T16:48:32.106245](https://huggingface.co/datasets/open-llm-leaderboard/details_Dans-DiscountModels__ShearedLlama-1.3b-FFT-Test1/blob/main/results_2023-12-16T16-48-32.106245.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.26194360492974045, "acc_stderr": 0.031003587918478445, "acc_norm": 0.26393590768678044, "acc_norm_stderr": 0.0318032795070849, "mc1": 0.22766217870257038, "mc1_stderr": 0.014679255032111075, "mc2": 0.3696758746746233, "mc2_stderr": 0.013710142031833798 }, "harness|arc:challenge|25": { "acc": 0.2935153583617747, "acc_stderr": 0.013307250444941122, "acc_norm": 0.3267918088737201, "acc_norm_stderr": 0.013706665975587336 }, "harness|hellaswag|10": { "acc": 0.4500099581756622, "acc_stderr": 0.004964779805180661, "acc_norm": 0.5998805018920533, "acc_norm_stderr": 0.0048892106289079775 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.27, "acc_stderr": 0.044619604333847415, "acc_norm": 0.27, "acc_norm_stderr": 0.044619604333847415 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.3037037037037037, "acc_stderr": 0.03972552884785137, "acc_norm": 0.3037037037037037, "acc_norm_stderr": 0.03972552884785137 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.17763157894736842, "acc_stderr": 0.03110318238312338, "acc_norm": 0.17763157894736842, "acc_norm_stderr": 0.03110318238312338 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.27, "acc_stderr": 0.0446196043338474, "acc_norm": 0.27, "acc_norm_stderr": 0.0446196043338474 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.2679245283018868, "acc_stderr": 0.027257260322494845, "acc_norm": 0.2679245283018868, "acc_norm_stderr": 0.027257260322494845 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.2708333333333333, "acc_stderr": 0.03716177437566016, "acc_norm": 0.2708333333333333, "acc_norm_stderr": 0.03716177437566016 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.2, "acc_stderr": 0.040201512610368445, "acc_norm": 0.2, "acc_norm_stderr": 0.040201512610368445 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.29, "acc_stderr": 
0.04560480215720684, "acc_norm": 0.29, "acc_norm_stderr": 0.04560480215720684 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.25, "acc_stderr": 0.04351941398892446, "acc_norm": 0.25, "acc_norm_stderr": 0.04351941398892446 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.2023121387283237, "acc_stderr": 0.030631145539198823, "acc_norm": 0.2023121387283237, "acc_norm_stderr": 0.030631145539198823 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.18627450980392157, "acc_stderr": 0.03873958714149351, "acc_norm": 0.18627450980392157, "acc_norm_stderr": 0.03873958714149351 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.32, "acc_stderr": 0.04688261722621505, "acc_norm": 0.32, "acc_norm_stderr": 0.04688261722621505 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.33191489361702126, "acc_stderr": 0.030783736757745647, "acc_norm": 0.33191489361702126, "acc_norm_stderr": 0.030783736757745647 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.2543859649122807, "acc_stderr": 0.040969851398436716, "acc_norm": 0.2543859649122807, "acc_norm_stderr": 0.040969851398436716 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.2206896551724138, "acc_stderr": 0.03455930201924811, "acc_norm": 0.2206896551724138, "acc_norm_stderr": 0.03455930201924811 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.24074074074074073, "acc_stderr": 0.0220190800122179, "acc_norm": 0.24074074074074073, "acc_norm_stderr": 0.0220190800122179 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.14285714285714285, "acc_stderr": 0.03129843185743809, "acc_norm": 0.14285714285714285, "acc_norm_stderr": 0.03129843185743809 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.26129032258064516, "acc_stderr": 0.02499305339776482, "acc_norm": 0.26129032258064516, "acc_norm_stderr": 0.02499305339776482 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.29064039408866993, "acc_stderr": 0.031947400722655415, "acc_norm": 0.29064039408866993, "acc_norm_stderr": 0.031947400722655415 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.25, "acc_stderr": 0.04351941398892446, "acc_norm": 0.25, "acc_norm_stderr": 0.04351941398892446 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.23030303030303031, "acc_stderr": 0.03287666758603489, "acc_norm": 0.23030303030303031, "acc_norm_stderr": 0.03287666758603489 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.2222222222222222, "acc_stderr": 0.02962022787479047, "acc_norm": 0.2222222222222222, "acc_norm_stderr": 0.02962022787479047 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.21761658031088082, "acc_stderr": 0.02977866303775295, "acc_norm": 0.21761658031088082, "acc_norm_stderr": 0.02977866303775295 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.23076923076923078, "acc_stderr": 0.021362027725222717, "acc_norm": 0.23076923076923078, "acc_norm_stderr": 0.021362027725222717 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.26666666666666666, "acc_stderr": 0.026962424325073845, "acc_norm": 0.26666666666666666, "acc_norm_stderr": 0.026962424325073845 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.2605042016806723, "acc_stderr": 0.028510251512341937, "acc_norm": 0.2605042016806723, "acc_norm_stderr": 0.028510251512341937 }, 
"harness|hendrycksTest-high_school_physics|5": { "acc": 0.2582781456953642, "acc_stderr": 0.035737053147634576, "acc_norm": 0.2582781456953642, "acc_norm_stderr": 0.035737053147634576 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.22568807339449543, "acc_stderr": 0.01792308766780306, "acc_norm": 0.22568807339449543, "acc_norm_stderr": 0.01792308766780306 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.17592592592592593, "acc_stderr": 0.025967420958258536, "acc_norm": 0.17592592592592593, "acc_norm_stderr": 0.025967420958258536 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.24019607843137256, "acc_stderr": 0.02998373305591361, "acc_norm": 0.24019607843137256, "acc_norm_stderr": 0.02998373305591361 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.25738396624472576, "acc_stderr": 0.0284588209914603, "acc_norm": 0.25738396624472576, "acc_norm_stderr": 0.0284588209914603 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.33183856502242154, "acc_stderr": 0.031602951437766785, "acc_norm": 0.33183856502242154, "acc_norm_stderr": 0.031602951437766785 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.22900763358778625, "acc_stderr": 0.036853466317118506, "acc_norm": 0.22900763358778625, "acc_norm_stderr": 0.036853466317118506 }, "harness|hendrycksTest-international_law|5": { "acc": 0.2975206611570248, "acc_stderr": 0.04173349148083499, "acc_norm": 0.2975206611570248, "acc_norm_stderr": 0.04173349148083499 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.2777777777777778, "acc_stderr": 0.043300437496507437, "acc_norm": 0.2777777777777778, "acc_norm_stderr": 0.043300437496507437 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.22699386503067484, "acc_stderr": 0.03291099578615769, "acc_norm": 0.22699386503067484, "acc_norm_stderr": 0.03291099578615769 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.32142857142857145, "acc_stderr": 0.0443280405529152, "acc_norm": 0.32142857142857145, "acc_norm_stderr": 0.0443280405529152 }, "harness|hendrycksTest-management|5": { "acc": 0.22330097087378642, "acc_stderr": 0.04123553189891431, "acc_norm": 0.22330097087378642, "acc_norm_stderr": 0.04123553189891431 }, "harness|hendrycksTest-marketing|5": { "acc": 0.3076923076923077, "acc_stderr": 0.0302363899421731, "acc_norm": 0.3076923076923077, "acc_norm_stderr": 0.0302363899421731 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.26, "acc_stderr": 0.044084400227680794, "acc_norm": 0.26, "acc_norm_stderr": 0.044084400227680794 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.27586206896551724, "acc_stderr": 0.015982814774695625, "acc_norm": 0.27586206896551724, "acc_norm_stderr": 0.015982814774695625 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.24566473988439305, "acc_stderr": 0.02317629820399201, "acc_norm": 0.24566473988439305, "acc_norm_stderr": 0.02317629820399201 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.2435754189944134, "acc_stderr": 0.014355911964767864, "acc_norm": 0.2435754189944134, "acc_norm_stderr": 0.014355911964767864 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.26143790849673204, "acc_stderr": 0.025160998214292456, "acc_norm": 0.26143790849673204, "acc_norm_stderr": 0.025160998214292456 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.27009646302250806, "acc_stderr": 0.025218040373410626, "acc_norm": 0.27009646302250806, "acc_norm_stderr": 0.025218040373410626 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.2623456790123457, "acc_stderr": 
0.024477222856135114, "acc_norm": 0.2623456790123457, "acc_norm_stderr": 0.024477222856135114 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.2978723404255319, "acc_stderr": 0.027281608344469414, "acc_norm": 0.2978723404255319, "acc_norm_stderr": 0.027281608344469414 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.24315514993481094, "acc_stderr": 0.010956556654417346, "acc_norm": 0.24315514993481094, "acc_norm_stderr": 0.010956556654417346 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.24632352941176472, "acc_stderr": 0.02617343857052, "acc_norm": 0.24632352941176472, "acc_norm_stderr": 0.02617343857052 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.27124183006535946, "acc_stderr": 0.017986615304030305, "acc_norm": 0.27124183006535946, "acc_norm_stderr": 0.017986615304030305 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.3090909090909091, "acc_stderr": 0.044262946482000985, "acc_norm": 0.3090909090909091, "acc_norm_stderr": 0.044262946482000985 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.18775510204081633, "acc_stderr": 0.025000256039546212, "acc_norm": 0.18775510204081633, "acc_norm_stderr": 0.025000256039546212 }, "harness|hendrycksTest-sociology|5": { "acc": 0.26865671641791045, "acc_stderr": 0.03134328358208955, "acc_norm": 0.26865671641791045, "acc_norm_stderr": 0.03134328358208955 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.29, "acc_stderr": 0.04560480215720684, "acc_norm": 0.29, "acc_norm_stderr": 0.04560480215720684 }, "harness|hendrycksTest-virology|5": { "acc": 0.3253012048192771, "acc_stderr": 0.036471685236832266, "acc_norm": 0.3253012048192771, "acc_norm_stderr": 0.036471685236832266 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.27485380116959063, "acc_stderr": 0.03424042924691583, "acc_norm": 0.27485380116959063, "acc_norm_stderr": 0.03424042924691583 }, "harness|truthfulqa:mc|0": { "mc1": 0.22766217870257038, "mc1_stderr": 0.014679255032111075, "mc2": 0.3696758746746233, "mc2_stderr": 0.013710142031833798 }, "harness|winogrande|5": { "acc": 0.5872138910812944, "acc_stderr": 0.0138370606486821 }, "harness|gsm8k|5": { "acc": 0.002274450341167551, "acc_stderr": 0.0013121578148674168 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. 
--> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
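As a complement to the loading snippet above, here is a small sketch of how the full list of configurations and the aggregated results of the latest run could be pulled, assuming only the standard `datasets` API (the repository id, the "results" configuration, and the "latest" split are the ones named in this card and its metadata):

```python
from datasets import get_dataset_config_names, load_dataset

repo = "open-llm-leaderboard/details_Dans-DiscountModels__ShearedLlama-1.3b-FFT-Test1"

# Enumerate the per-task configurations plus the aggregated "results" configuration.
configs = get_dataset_config_names(repo)
print(len(configs), configs[:5])

# Load the aggregated metrics of the most recent evaluation run.
results = load_dataset(repo, "results", split="latest")
print(results[0])
```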
open-llm-leaderboard/details_Dans-DiscountModels__ShearedLlama-1.3b-FFT-Test1
[ "region:us" ]
2023-12-16T16:51:27+00:00
{"pretty_name": "Evaluation run of Dans-DiscountModels/ShearedLlama-1.3b-FFT-Test1", "dataset_summary": "Dataset automatically created during the evaluation run of model [Dans-DiscountModels/ShearedLlama-1.3b-FFT-Test1](https://huggingface.co/Dans-DiscountModels/ShearedLlama-1.3b-FFT-Test1) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_Dans-DiscountModels__ShearedLlama-1.3b-FFT-Test1\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-12-16T16:48:32.106245](https://huggingface.co/datasets/open-llm-leaderboard/details_Dans-DiscountModels__ShearedLlama-1.3b-FFT-Test1/blob/main/results_2023-12-16T16-48-32.106245.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.26194360492974045,\n \"acc_stderr\": 0.031003587918478445,\n \"acc_norm\": 0.26393590768678044,\n \"acc_norm_stderr\": 0.0318032795070849,\n \"mc1\": 0.22766217870257038,\n \"mc1_stderr\": 0.014679255032111075,\n \"mc2\": 0.3696758746746233,\n \"mc2_stderr\": 0.013710142031833798\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.2935153583617747,\n \"acc_stderr\": 0.013307250444941122,\n \"acc_norm\": 0.3267918088737201,\n \"acc_norm_stderr\": 0.013706665975587336\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.4500099581756622,\n \"acc_stderr\": 0.004964779805180661,\n \"acc_norm\": 0.5998805018920533,\n \"acc_norm_stderr\": 0.0048892106289079775\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.27,\n \"acc_stderr\": 0.044619604333847415,\n \"acc_norm\": 0.27,\n \"acc_norm_stderr\": 0.044619604333847415\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.3037037037037037,\n \"acc_stderr\": 0.03972552884785137,\n \"acc_norm\": 0.3037037037037037,\n \"acc_norm_stderr\": 0.03972552884785137\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.17763157894736842,\n \"acc_stderr\": 0.03110318238312338,\n \"acc_norm\": 0.17763157894736842,\n \"acc_norm_stderr\": 0.03110318238312338\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.27,\n \"acc_stderr\": 0.0446196043338474,\n \"acc_norm\": 0.27,\n \"acc_norm_stderr\": 0.0446196043338474\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.2679245283018868,\n \"acc_stderr\": 0.027257260322494845,\n \"acc_norm\": 0.2679245283018868,\n \"acc_norm_stderr\": 0.027257260322494845\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.2708333333333333,\n \"acc_stderr\": 0.03716177437566016,\n \"acc_norm\": 0.2708333333333333,\n \"acc_norm_stderr\": 0.03716177437566016\n },\n 
\"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.2,\n \"acc_stderr\": 0.040201512610368445,\n \"acc_norm\": 0.2,\n \"acc_norm_stderr\": 0.040201512610368445\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.29,\n \"acc_stderr\": 0.04560480215720684,\n \"acc_norm\": 0.29,\n \"acc_norm_stderr\": 0.04560480215720684\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.25,\n \"acc_stderr\": 0.04351941398892446,\n \"acc_norm\": 0.25,\n \"acc_norm_stderr\": 0.04351941398892446\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.2023121387283237,\n \"acc_stderr\": 0.030631145539198823,\n \"acc_norm\": 0.2023121387283237,\n \"acc_norm_stderr\": 0.030631145539198823\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.18627450980392157,\n \"acc_stderr\": 0.03873958714149351,\n \"acc_norm\": 0.18627450980392157,\n \"acc_norm_stderr\": 0.03873958714149351\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.32,\n \"acc_stderr\": 0.04688261722621505,\n \"acc_norm\": 0.32,\n \"acc_norm_stderr\": 0.04688261722621505\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.33191489361702126,\n \"acc_stderr\": 0.030783736757745647,\n \"acc_norm\": 0.33191489361702126,\n \"acc_norm_stderr\": 0.030783736757745647\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.2543859649122807,\n \"acc_stderr\": 0.040969851398436716,\n \"acc_norm\": 0.2543859649122807,\n \"acc_norm_stderr\": 0.040969851398436716\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.2206896551724138,\n \"acc_stderr\": 0.03455930201924811,\n \"acc_norm\": 0.2206896551724138,\n \"acc_norm_stderr\": 0.03455930201924811\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.24074074074074073,\n \"acc_stderr\": 0.0220190800122179,\n \"acc_norm\": 0.24074074074074073,\n \"acc_norm_stderr\": 0.0220190800122179\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.14285714285714285,\n \"acc_stderr\": 0.03129843185743809,\n \"acc_norm\": 0.14285714285714285,\n \"acc_norm_stderr\": 0.03129843185743809\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.31,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.31,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.26129032258064516,\n \"acc_stderr\": 0.02499305339776482,\n \"acc_norm\": 0.26129032258064516,\n \"acc_norm_stderr\": 0.02499305339776482\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.29064039408866993,\n \"acc_stderr\": 0.031947400722655415,\n \"acc_norm\": 0.29064039408866993,\n \"acc_norm_stderr\": 0.031947400722655415\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.25,\n \"acc_stderr\": 0.04351941398892446,\n \"acc_norm\": 0.25,\n \"acc_norm_stderr\": 0.04351941398892446\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.23030303030303031,\n \"acc_stderr\": 0.03287666758603489,\n \"acc_norm\": 0.23030303030303031,\n \"acc_norm_stderr\": 0.03287666758603489\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.2222222222222222,\n \"acc_stderr\": 0.02962022787479047,\n \"acc_norm\": 0.2222222222222222,\n \"acc_norm_stderr\": 0.02962022787479047\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.21761658031088082,\n \"acc_stderr\": 0.02977866303775295,\n \"acc_norm\": 0.21761658031088082,\n 
\"acc_norm_stderr\": 0.02977866303775295\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.23076923076923078,\n \"acc_stderr\": 0.021362027725222717,\n \"acc_norm\": 0.23076923076923078,\n \"acc_norm_stderr\": 0.021362027725222717\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.26666666666666666,\n \"acc_stderr\": 0.026962424325073845,\n \"acc_norm\": 0.26666666666666666,\n \"acc_norm_stderr\": 0.026962424325073845\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.2605042016806723,\n \"acc_stderr\": 0.028510251512341937,\n \"acc_norm\": 0.2605042016806723,\n \"acc_norm_stderr\": 0.028510251512341937\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.2582781456953642,\n \"acc_stderr\": 0.035737053147634576,\n \"acc_norm\": 0.2582781456953642,\n \"acc_norm_stderr\": 0.035737053147634576\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.22568807339449543,\n \"acc_stderr\": 0.01792308766780306,\n \"acc_norm\": 0.22568807339449543,\n \"acc_norm_stderr\": 0.01792308766780306\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.17592592592592593,\n \"acc_stderr\": 0.025967420958258536,\n \"acc_norm\": 0.17592592592592593,\n \"acc_norm_stderr\": 0.025967420958258536\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.24019607843137256,\n \"acc_stderr\": 0.02998373305591361,\n \"acc_norm\": 0.24019607843137256,\n \"acc_norm_stderr\": 0.02998373305591361\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.25738396624472576,\n \"acc_stderr\": 0.0284588209914603,\n \"acc_norm\": 0.25738396624472576,\n \"acc_norm_stderr\": 0.0284588209914603\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.33183856502242154,\n \"acc_stderr\": 0.031602951437766785,\n \"acc_norm\": 0.33183856502242154,\n \"acc_norm_stderr\": 0.031602951437766785\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.22900763358778625,\n \"acc_stderr\": 0.036853466317118506,\n \"acc_norm\": 0.22900763358778625,\n \"acc_norm_stderr\": 0.036853466317118506\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.2975206611570248,\n \"acc_stderr\": 0.04173349148083499,\n \"acc_norm\": 0.2975206611570248,\n \"acc_norm_stderr\": 0.04173349148083499\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.2777777777777778,\n \"acc_stderr\": 0.043300437496507437,\n \"acc_norm\": 0.2777777777777778,\n \"acc_norm_stderr\": 0.043300437496507437\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.22699386503067484,\n \"acc_stderr\": 0.03291099578615769,\n \"acc_norm\": 0.22699386503067484,\n \"acc_norm_stderr\": 0.03291099578615769\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.32142857142857145,\n \"acc_stderr\": 0.0443280405529152,\n \"acc_norm\": 0.32142857142857145,\n \"acc_norm_stderr\": 0.0443280405529152\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.22330097087378642,\n \"acc_stderr\": 0.04123553189891431,\n \"acc_norm\": 0.22330097087378642,\n \"acc_norm_stderr\": 0.04123553189891431\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.3076923076923077,\n \"acc_stderr\": 0.0302363899421731,\n \"acc_norm\": 0.3076923076923077,\n \"acc_norm_stderr\": 0.0302363899421731\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.26,\n \"acc_stderr\": 0.044084400227680794,\n \"acc_norm\": 0.26,\n \"acc_norm_stderr\": 0.044084400227680794\n 
},\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.27586206896551724,\n \"acc_stderr\": 0.015982814774695625,\n \"acc_norm\": 0.27586206896551724,\n \"acc_norm_stderr\": 0.015982814774695625\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.24566473988439305,\n \"acc_stderr\": 0.02317629820399201,\n \"acc_norm\": 0.24566473988439305,\n \"acc_norm_stderr\": 0.02317629820399201\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.2435754189944134,\n \"acc_stderr\": 0.014355911964767864,\n \"acc_norm\": 0.2435754189944134,\n \"acc_norm_stderr\": 0.014355911964767864\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.26143790849673204,\n \"acc_stderr\": 0.025160998214292456,\n \"acc_norm\": 0.26143790849673204,\n \"acc_norm_stderr\": 0.025160998214292456\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.27009646302250806,\n \"acc_stderr\": 0.025218040373410626,\n \"acc_norm\": 0.27009646302250806,\n \"acc_norm_stderr\": 0.025218040373410626\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.2623456790123457,\n \"acc_stderr\": 0.024477222856135114,\n \"acc_norm\": 0.2623456790123457,\n \"acc_norm_stderr\": 0.024477222856135114\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.2978723404255319,\n \"acc_stderr\": 0.027281608344469414,\n \"acc_norm\": 0.2978723404255319,\n \"acc_norm_stderr\": 0.027281608344469414\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.24315514993481094,\n \"acc_stderr\": 0.010956556654417346,\n \"acc_norm\": 0.24315514993481094,\n \"acc_norm_stderr\": 0.010956556654417346\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.24632352941176472,\n \"acc_stderr\": 0.02617343857052,\n \"acc_norm\": 0.24632352941176472,\n \"acc_norm_stderr\": 0.02617343857052\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.27124183006535946,\n \"acc_stderr\": 0.017986615304030305,\n \"acc_norm\": 0.27124183006535946,\n \"acc_norm_stderr\": 0.017986615304030305\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.3090909090909091,\n \"acc_stderr\": 0.044262946482000985,\n \"acc_norm\": 0.3090909090909091,\n \"acc_norm_stderr\": 0.044262946482000985\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.18775510204081633,\n \"acc_stderr\": 0.025000256039546212,\n \"acc_norm\": 0.18775510204081633,\n \"acc_norm_stderr\": 0.025000256039546212\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.26865671641791045,\n \"acc_stderr\": 0.03134328358208955,\n \"acc_norm\": 0.26865671641791045,\n \"acc_norm_stderr\": 0.03134328358208955\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.29,\n \"acc_stderr\": 0.04560480215720684,\n \"acc_norm\": 0.29,\n \"acc_norm_stderr\": 0.04560480215720684\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.3253012048192771,\n \"acc_stderr\": 0.036471685236832266,\n \"acc_norm\": 0.3253012048192771,\n \"acc_norm_stderr\": 0.036471685236832266\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.27485380116959063,\n \"acc_stderr\": 0.03424042924691583,\n \"acc_norm\": 0.27485380116959063,\n \"acc_norm_stderr\": 0.03424042924691583\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.22766217870257038,\n \"mc1_stderr\": 0.014679255032111075,\n \"mc2\": 0.3696758746746233,\n \"mc2_stderr\": 0.013710142031833798\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.5872138910812944,\n \"acc_stderr\": 0.0138370606486821\n },\n 
\"harness|gsm8k|5\": {\n \"acc\": 0.002274450341167551,\n \"acc_stderr\": 0.0013121578148674168\n }\n}\n```", "repo_url": "https://huggingface.co/Dans-DiscountModels/ShearedLlama-1.3b-FFT-Test1", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_12_16T16_48_32.106245", "path": ["**/details_harness|arc:challenge|25_2023-12-16T16-48-32.106245.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-12-16T16-48-32.106245.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_12_16T16_48_32.106245", "path": ["**/details_harness|gsm8k|5_2023-12-16T16-48-32.106245.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-12-16T16-48-32.106245.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_12_16T16_48_32.106245", "path": ["**/details_harness|hellaswag|10_2023-12-16T16-48-32.106245.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-12-16T16-48-32.106245.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_12_16T16_48_32.106245", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T16-48-32.106245.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-12-16T16-48-32.106245.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-12-16T16-48-32.106245.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T16-48-32.106245.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T16-48-32.106245.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-12-16T16-48-32.106245.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T16-48-32.106245.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T16-48-32.106245.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T16-48-32.106245.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T16-48-32.106245.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-12-16T16-48-32.106245.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-12-16T16-48-32.106245.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T16-48-32.106245.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-12-16T16-48-32.106245.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T16-48-32.106245.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T16-48-32.106245.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T16-48-32.106245.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-12-16T16-48-32.106245.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T16-48-32.106245.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T16-48-32.106245.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T16-48-32.106245.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T16-48-32.106245.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T16-48-32.106245.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T16-48-32.106245.parquet", 
"**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T16-48-32.106245.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T16-48-32.106245.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T16-48-32.106245.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T16-48-32.106245.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T16-48-32.106245.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T16-48-32.106245.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T16-48-32.106245.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T16-48-32.106245.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-12-16T16-48-32.106245.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T16-48-32.106245.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-12-16T16-48-32.106245.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T16-48-32.106245.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T16-48-32.106245.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T16-48-32.106245.parquet", "**/details_harness|hendrycksTest-management|5_2023-12-16T16-48-32.106245.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-12-16T16-48-32.106245.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T16-48-32.106245.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T16-48-32.106245.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T16-48-32.106245.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T16-48-32.106245.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-12-16T16-48-32.106245.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-12-16T16-48-32.106245.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-12-16T16-48-32.106245.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T16-48-32.106245.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-12-16T16-48-32.106245.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T16-48-32.106245.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T16-48-32.106245.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-12-16T16-48-32.106245.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-12-16T16-48-32.106245.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-12-16T16-48-32.106245.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T16-48-32.106245.parquet", "**/details_harness|hendrycksTest-virology|5_2023-12-16T16-48-32.106245.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-12-16T16-48-32.106245.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T16-48-32.106245.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-12-16T16-48-32.106245.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-12-16T16-48-32.106245.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T16-48-32.106245.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T16-48-32.106245.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-12-16T16-48-32.106245.parquet", 
"**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T16-48-32.106245.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T16-48-32.106245.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T16-48-32.106245.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T16-48-32.106245.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-12-16T16-48-32.106245.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-12-16T16-48-32.106245.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T16-48-32.106245.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-12-16T16-48-32.106245.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T16-48-32.106245.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T16-48-32.106245.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T16-48-32.106245.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-12-16T16-48-32.106245.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T16-48-32.106245.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T16-48-32.106245.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T16-48-32.106245.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T16-48-32.106245.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T16-48-32.106245.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T16-48-32.106245.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T16-48-32.106245.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T16-48-32.106245.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T16-48-32.106245.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T16-48-32.106245.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T16-48-32.106245.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T16-48-32.106245.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T16-48-32.106245.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T16-48-32.106245.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-12-16T16-48-32.106245.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T16-48-32.106245.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-12-16T16-48-32.106245.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T16-48-32.106245.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T16-48-32.106245.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T16-48-32.106245.parquet", "**/details_harness|hendrycksTest-management|5_2023-12-16T16-48-32.106245.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-12-16T16-48-32.106245.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T16-48-32.106245.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T16-48-32.106245.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T16-48-32.106245.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T16-48-32.106245.parquet", 
"**/details_harness|hendrycksTest-nutrition|5_2023-12-16T16-48-32.106245.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-12-16T16-48-32.106245.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-12-16T16-48-32.106245.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T16-48-32.106245.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-12-16T16-48-32.106245.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T16-48-32.106245.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T16-48-32.106245.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-12-16T16-48-32.106245.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-12-16T16-48-32.106245.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-12-16T16-48-32.106245.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T16-48-32.106245.parquet", "**/details_harness|hendrycksTest-virology|5_2023-12-16T16-48-32.106245.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-12-16T16-48-32.106245.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_12_16T16_48_32.106245", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T16-48-32.106245.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T16-48-32.106245.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_12_16T16_48_32.106245", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-12-16T16-48-32.106245.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-12-16T16-48-32.106245.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_12_16T16_48_32.106245", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-12-16T16-48-32.106245.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-12-16T16-48-32.106245.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_12_16T16_48_32.106245", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T16-48-32.106245.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T16-48-32.106245.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_12_16T16_48_32.106245", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T16-48-32.106245.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T16-48-32.106245.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_12_16T16_48_32.106245", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-12-16T16-48-32.106245.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-12-16T16-48-32.106245.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_12_16T16_48_32.106245", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T16-48-32.106245.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T16-48-32.106245.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", 
"data_files": [{"split": "2023_12_16T16_48_32.106245", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T16-48-32.106245.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T16-48-32.106245.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_12_16T16_48_32.106245", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T16-48-32.106245.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T16-48-32.106245.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_12_16T16_48_32.106245", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T16-48-32.106245.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T16-48-32.106245.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_12_16T16_48_32.106245", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-12-16T16-48-32.106245.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-12-16T16-48-32.106245.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_12_16T16_48_32.106245", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-12-16T16-48-32.106245.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-12-16T16-48-32.106245.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_12_16T16_48_32.106245", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T16-48-32.106245.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T16-48-32.106245.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_12_16T16_48_32.106245", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-12-16T16-48-32.106245.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-12-16T16-48-32.106245.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_12_16T16_48_32.106245", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T16-48-32.106245.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T16-48-32.106245.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_12_16T16_48_32.106245", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T16-48-32.106245.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T16-48-32.106245.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_12_16T16_48_32.106245", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T16-48-32.106245.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T16-48-32.106245.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_12_16T16_48_32.106245", "path": 
["**/details_harness|hendrycksTest-global_facts|5_2023-12-16T16-48-32.106245.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-12-16T16-48-32.106245.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_12_16T16_48_32.106245", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T16-48-32.106245.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T16-48-32.106245.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_12_16T16_48_32.106245", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T16-48-32.106245.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T16-48-32.106245.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_12_16T16_48_32.106245", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T16-48-32.106245.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T16-48-32.106245.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_12_16T16_48_32.106245", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T16-48-32.106245.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T16-48-32.106245.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_12_16T16_48_32.106245", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T16-48-32.106245.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T16-48-32.106245.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_12_16T16_48_32.106245", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T16-48-32.106245.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T16-48-32.106245.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_12_16T16_48_32.106245", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T16-48-32.106245.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T16-48-32.106245.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_12_16T16_48_32.106245", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T16-48-32.106245.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T16-48-32.106245.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_12_16T16_48_32.106245", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T16-48-32.106245.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T16-48-32.106245.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_12_16T16_48_32.106245", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T16-48-32.106245.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T16-48-32.106245.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_12_16T16_48_32.106245", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T16-48-32.106245.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T16-48-32.106245.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_12_16T16_48_32.106245", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T16-48-32.106245.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T16-48-32.106245.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_12_16T16_48_32.106245", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T16-48-32.106245.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T16-48-32.106245.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_12_16T16_48_32.106245", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T16-48-32.106245.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T16-48-32.106245.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_12_16T16_48_32.106245", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-12-16T16-48-32.106245.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-12-16T16-48-32.106245.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_12_16T16_48_32.106245", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T16-48-32.106245.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T16-48-32.106245.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_12_16T16_48_32.106245", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-12-16T16-48-32.106245.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-12-16T16-48-32.106245.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_12_16T16_48_32.106245", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T16-48-32.106245.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T16-48-32.106245.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_12_16T16_48_32.106245", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T16-48-32.106245.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T16-48-32.106245.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": 
"2023_12_16T16_48_32.106245", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T16-48-32.106245.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T16-48-32.106245.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_12_16T16_48_32.106245", "path": ["**/details_harness|hendrycksTest-management|5_2023-12-16T16-48-32.106245.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-12-16T16-48-32.106245.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_12_16T16_48_32.106245", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-12-16T16-48-32.106245.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-12-16T16-48-32.106245.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_12_16T16_48_32.106245", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T16-48-32.106245.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T16-48-32.106245.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_12_16T16_48_32.106245", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T16-48-32.106245.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T16-48-32.106245.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_12_16T16_48_32.106245", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T16-48-32.106245.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T16-48-32.106245.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_12_16T16_48_32.106245", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T16-48-32.106245.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T16-48-32.106245.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_12_16T16_48_32.106245", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-12-16T16-48-32.106245.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-12-16T16-48-32.106245.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_12_16T16_48_32.106245", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-12-16T16-48-32.106245.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-12-16T16-48-32.106245.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_12_16T16_48_32.106245", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-12-16T16-48-32.106245.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-12-16T16-48-32.106245.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_12_16T16_48_32.106245", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T16-48-32.106245.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T16-48-32.106245.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_12_16T16_48_32.106245", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-12-16T16-48-32.106245.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-12-16T16-48-32.106245.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_12_16T16_48_32.106245", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T16-48-32.106245.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T16-48-32.106245.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_12_16T16_48_32.106245", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T16-48-32.106245.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T16-48-32.106245.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_12_16T16_48_32.106245", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-12-16T16-48-32.106245.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-12-16T16-48-32.106245.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_12_16T16_48_32.106245", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-12-16T16-48-32.106245.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-12-16T16-48-32.106245.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_12_16T16_48_32.106245", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-12-16T16-48-32.106245.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-12-16T16-48-32.106245.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_12_16T16_48_32.106245", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T16-48-32.106245.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T16-48-32.106245.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_12_16T16_48_32.106245", "path": ["**/details_harness|hendrycksTest-virology|5_2023-12-16T16-48-32.106245.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-12-16T16-48-32.106245.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_12_16T16_48_32.106245", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-12-16T16-48-32.106245.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-12-16T16-48-32.106245.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_12_16T16_48_32.106245", "path": ["**/details_harness|truthfulqa:mc|0_2023-12-16T16-48-32.106245.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-12-16T16-48-32.106245.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_12_16T16_48_32.106245", "path": ["**/details_harness|winogrande|5_2023-12-16T16-48-32.106245.parquet"]}, {"split": "latest", "path": 
["**/details_harness|winogrande|5_2023-12-16T16-48-32.106245.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_12_16T16_48_32.106245", "path": ["results_2023-12-16T16-48-32.106245.parquet"]}, {"split": "latest", "path": ["results_2023-12-16T16-48-32.106245.parquet"]}]}]}
2023-12-16T16:52:08+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of Dans-DiscountModels/ShearedLlama-1.3b-FFT-Test1 Dataset automatically created during the evaluation run of model Dans-DiscountModels/ShearedLlama-1.3b-FFT-Test1 on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following (a minimal loading example is given after this card text): ## Latest results These are the latest results from run 2023-12-16T16:48:32.106245 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
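A minimal loading sketch, mirroring the snippet used by the other evaluation-run cards (the original code block is dropped in the processed card text above). The repository id below is an assumption inferred from the leaderboard's `details_<org>__<model>` naming convention; the `harness_winogrande_5` configuration and its `latest` split are taken from this card's config metadata.

```python
from datasets import load_dataset

# Assumed repo id, following the "details_<org>__<model>" convention used by
# Open LLM Leaderboard detail datasets; verify against the actual dataset page.
data = load_dataset(
    "open-llm-leaderboard/details_Dans-DiscountModels__ShearedLlama-1.3b-FFT-Test1",
    "harness_winogrande_5",
    split="latest",  # per the card, the "train" split also points at the newest run
)
print(data)
```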
[ "# Dataset Card for Evaluation run of Dans-DiscountModels/ShearedLlama-1.3b-FFT-Test1\n\n\n\nDataset automatically created during the evaluation run of model Dans-DiscountModels/ShearedLlama-1.3b-FFT-Test1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-12-16T16:48:32.106245(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of Dans-DiscountModels/ShearedLlama-1.3b-FFT-Test1\n\n\n\nDataset automatically created during the evaluation run of model Dans-DiscountModels/ShearedLlama-1.3b-FFT-Test1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-12-16T16:48:32.106245(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 203, 67, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of Dans-DiscountModels/ShearedLlama-1.3b-FFT-Test1\n\n\n\nDataset automatically created during the evaluation run of model Dans-DiscountModels/ShearedLlama-1.3b-FFT-Test1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-12-16T16:48:32.106245(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]" ]
efacdbedc102e0bf0490c5d8db1376d597abfc70
# Dataset Card for Evaluation run of mindy-labs/mindy-7b <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [mindy-labs/mindy-7b](https://huggingface.co/mindy-labs/mindy-7b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_mindy-labs__mindy-7b", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-12-24T18:45:34.219710](https://huggingface.co/datasets/open-llm-leaderboard/details_mindy-labs__mindy-7b/blob/main/results_2023-12-24T18-45-34.219710.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.24166495223886916, "acc_stderr": 0.030368284239861622, "acc_norm": 0.24173834204737407, "acc_norm_stderr": 0.03116492780271872, "mc1": 0.2839657282741738, "mc1_stderr": 0.015785370858396725, "mc2": NaN, "mc2_stderr": NaN }, "harness|arc:challenge|25": { "acc": 0.21928327645051193, "acc_stderr": 0.012091245787615725, "acc_norm": 0.2363481228668942, "acc_norm_stderr": 0.012414960524301825 }, "harness|hellaswag|10": { "acc": 0.25941047600079664, "acc_stderr": 0.004374153847826759, "acc_norm": 0.2582154949213304, "acc_norm_stderr": 0.004367586801776666 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.25, "acc_stderr": 0.04351941398892446, "acc_norm": 0.25, "acc_norm_stderr": 0.04351941398892446 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.2740740740740741, "acc_stderr": 0.03853254836552003, "acc_norm": 0.2740740740740741, "acc_norm_stderr": 0.03853254836552003 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.17763157894736842, "acc_stderr": 0.031103182383123398, "acc_norm": 0.17763157894736842, "acc_norm_stderr": 0.031103182383123398 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.21509433962264152, "acc_stderr": 0.02528839450289137, "acc_norm": 0.21509433962264152, "acc_norm_stderr": 0.02528839450289137 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.2569444444444444, "acc_stderr": 0.03653946969442099, "acc_norm": 0.2569444444444444, "acc_norm_stderr": 0.03653946969442099 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.2, "acc_stderr": 0.04020151261036845, "acc_norm": 0.2, "acc_norm_stderr": 0.04020151261036845 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.26, "acc_stderr": 0.0440844002276808, "acc_norm": 0.26, "acc_norm_stderr": 0.0440844002276808 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.21, "acc_stderr": 0.040936018074033256, 
"acc_norm": 0.21, "acc_norm_stderr": 0.040936018074033256 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.1907514450867052, "acc_stderr": 0.029957851329869337, "acc_norm": 0.1907514450867052, "acc_norm_stderr": 0.029957851329869337 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.28431372549019607, "acc_stderr": 0.04488482852329017, "acc_norm": 0.28431372549019607, "acc_norm_stderr": 0.04488482852329017 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.28, "acc_stderr": 0.045126085985421276, "acc_norm": 0.28, "acc_norm_stderr": 0.045126085985421276 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.2936170212765957, "acc_stderr": 0.02977164271249122, "acc_norm": 0.2936170212765957, "acc_norm_stderr": 0.02977164271249122 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.21929824561403508, "acc_stderr": 0.03892431106518751, "acc_norm": 0.21929824561403508, "acc_norm_stderr": 0.03892431106518751 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.2413793103448276, "acc_stderr": 0.03565998174135302, "acc_norm": 0.2413793103448276, "acc_norm_stderr": 0.03565998174135302 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.23809523809523808, "acc_stderr": 0.02193587808118476, "acc_norm": 0.23809523809523808, "acc_norm_stderr": 0.02193587808118476 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.2857142857142857, "acc_stderr": 0.04040610178208841, "acc_norm": 0.2857142857142857, "acc_norm_stderr": 0.04040610178208841 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.21, "acc_stderr": 0.04093601807403325, "acc_norm": 0.21, "acc_norm_stderr": 0.04093601807403325 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.1774193548387097, "acc_stderr": 0.02173254068932927, "acc_norm": 0.1774193548387097, "acc_norm_stderr": 0.02173254068932927 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.15270935960591134, "acc_stderr": 0.02530890453938063, "acc_norm": 0.15270935960591134, "acc_norm_stderr": 0.02530890453938063 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.25, "acc_stderr": 0.04351941398892446, "acc_norm": 0.25, "acc_norm_stderr": 0.04351941398892446 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.21818181818181817, "acc_stderr": 0.03225078108306289, "acc_norm": 0.21818181818181817, "acc_norm_stderr": 0.03225078108306289 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.17676767676767677, "acc_stderr": 0.027178752639044915, "acc_norm": 0.17676767676767677, "acc_norm_stderr": 0.027178752639044915 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.24352331606217617, "acc_stderr": 0.030975436386845426, "acc_norm": 0.24352331606217617, "acc_norm_stderr": 0.030975436386845426 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.2846153846153846, "acc_stderr": 0.0228783227997063, "acc_norm": 0.2846153846153846, "acc_norm_stderr": 0.0228783227997063 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.2111111111111111, "acc_stderr": 0.024882116857655075, "acc_norm": 0.2111111111111111, "acc_norm_stderr": 0.024882116857655075 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.27310924369747897, "acc_stderr": 0.028942004040998174, "acc_norm": 0.27310924369747897, "acc_norm_stderr": 0.028942004040998174 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.1986754966887417, "acc_stderr": 0.03257847384436776, "acc_norm": 0.1986754966887417, "acc_norm_stderr": 0.03257847384436776 }, 
"harness|hendrycksTest-high_school_psychology|5": { "acc": 0.1926605504587156, "acc_stderr": 0.016909276884936094, "acc_norm": 0.1926605504587156, "acc_norm_stderr": 0.016909276884936094 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.2037037037037037, "acc_stderr": 0.027467401804058014, "acc_norm": 0.2037037037037037, "acc_norm_stderr": 0.027467401804058014 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.25, "acc_stderr": 0.03039153369274154, "acc_norm": 0.25, "acc_norm_stderr": 0.03039153369274154 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.270042194092827, "acc_stderr": 0.028900721906293426, "acc_norm": 0.270042194092827, "acc_norm_stderr": 0.028900721906293426 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.31390134529147984, "acc_stderr": 0.031146796482972465, "acc_norm": 0.31390134529147984, "acc_norm_stderr": 0.031146796482972465 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.20610687022900764, "acc_stderr": 0.03547771004159463, "acc_norm": 0.20610687022900764, "acc_norm_stderr": 0.03547771004159463 }, "harness|hendrycksTest-international_law|5": { "acc": 0.2396694214876033, "acc_stderr": 0.03896878985070417, "acc_norm": 0.2396694214876033, "acc_norm_stderr": 0.03896878985070417 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.24074074074074073, "acc_stderr": 0.04133119440243839, "acc_norm": 0.24074074074074073, "acc_norm_stderr": 0.04133119440243839 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.22085889570552147, "acc_stderr": 0.032591773927421776, "acc_norm": 0.22085889570552147, "acc_norm_stderr": 0.032591773927421776 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.3125, "acc_stderr": 0.043994650575715215, "acc_norm": 0.3125, "acc_norm_stderr": 0.043994650575715215 }, "harness|hendrycksTest-management|5": { "acc": 0.17475728155339806, "acc_stderr": 0.037601780060266224, "acc_norm": 0.17475728155339806, "acc_norm_stderr": 0.037601780060266224 }, "harness|hendrycksTest-marketing|5": { "acc": 0.25213675213675213, "acc_stderr": 0.02844796547623102, "acc_norm": 0.25213675213675213, "acc_norm_stderr": 0.02844796547623102 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.33, "acc_stderr": 0.04725815626252605, "acc_norm": 0.33, "acc_norm_stderr": 0.04725815626252605 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.23754789272030652, "acc_stderr": 0.015218733046150193, "acc_norm": 0.23754789272030652, "acc_norm_stderr": 0.015218733046150193 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.21965317919075145, "acc_stderr": 0.022289638852617897, "acc_norm": 0.21965317919075145, "acc_norm_stderr": 0.022289638852617897 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.23798882681564246, "acc_stderr": 0.014242630070574915, "acc_norm": 0.23798882681564246, "acc_norm_stderr": 0.014242630070574915 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.22549019607843138, "acc_stderr": 0.023929155517351284, "acc_norm": 0.22549019607843138, "acc_norm_stderr": 0.023929155517351284 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.27009646302250806, "acc_stderr": 0.025218040373410633, "acc_norm": 0.27009646302250806, "acc_norm_stderr": 0.025218040373410633 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.2654320987654321, "acc_stderr": 0.024569223600460845, "acc_norm": 0.2654320987654321, "acc_norm_stderr": 0.024569223600460845 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.23404255319148937, "acc_stderr": 0.025257861359432417, "acc_norm": 
0.23404255319148937, "acc_norm_stderr": 0.025257861359432417 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.2457627118644068, "acc_stderr": 0.010996156635142692, "acc_norm": 0.2457627118644068, "acc_norm_stderr": 0.010996156635142692 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.23897058823529413, "acc_stderr": 0.02590528064489301, "acc_norm": 0.23897058823529413, "acc_norm_stderr": 0.02590528064489301 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.25, "acc_stderr": 0.01751781884501444, "acc_norm": 0.25, "acc_norm_stderr": 0.01751781884501444 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.2727272727272727, "acc_stderr": 0.04265792110940588, "acc_norm": 0.2727272727272727, "acc_norm_stderr": 0.04265792110940588 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.18775510204081633, "acc_stderr": 0.02500025603954621, "acc_norm": 0.18775510204081633, "acc_norm_stderr": 0.02500025603954621 }, "harness|hendrycksTest-sociology|5": { "acc": 0.263681592039801, "acc_stderr": 0.03115715086935559, "acc_norm": 0.263681592039801, "acc_norm_stderr": 0.03115715086935559 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.23, "acc_stderr": 0.04229525846816508, "acc_norm": 0.23, "acc_norm_stderr": 0.04229525846816508 }, "harness|hendrycksTest-virology|5": { "acc": 0.27710843373493976, "acc_stderr": 0.034843315926805875, "acc_norm": 0.27710843373493976, "acc_norm_stderr": 0.034843315926805875 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.3216374269005848, "acc_stderr": 0.03582529442573122, "acc_norm": 0.3216374269005848, "acc_norm_stderr": 0.03582529442573122 }, "harness|truthfulqa:mc|0": { "mc1": 0.2839657282741738, "mc1_stderr": 0.015785370858396725, "mc2": NaN, "mc2_stderr": NaN }, "harness|winogrande|5": { "acc": 0.494869771112865, "acc_stderr": 0.014051745961790516 }, "harness|gsm8k|5": { "acc": 0.0, "acc_stderr": 0.0 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). 
--> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
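As a complement to the per-task loading snippet in the card above, the short sketch below (not part of the original card) pulls the aggregated "results" configuration that the summary describes; it assumes only the `results` config and `latest` split named by the card, plus the standard `datasets` and `pandas` APIs.

```python
from datasets import load_dataset

# Load the aggregated metrics; per the card, the "results" configuration
# stores them and the "latest" split tracks the most recent evaluation run.
results = load_dataset(
    "open-llm-leaderboard/details_mindy-labs__mindy-7b",
    "results",
    split="latest",
)

# Inspect as a pandas DataFrame.
df = results.to_pandas()
print(df.head())
```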
open-llm-leaderboard/details_mindy-labs__mindy-7b
[ "region:us" ]
2023-12-16T16:58:26+00:00
{"pretty_name": "Evaluation run of mindy-labs/mindy-7b", "dataset_summary": "Dataset automatically created during the evaluation run of model [mindy-labs/mindy-7b](https://huggingface.co/mindy-labs/mindy-7b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_mindy-labs__mindy-7b\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-12-24T18:45:34.219710](https://huggingface.co/datasets/open-llm-leaderboard/details_mindy-labs__mindy-7b/blob/main/results_2023-12-24T18-45-34.219710.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.24166495223886916,\n \"acc_stderr\": 0.030368284239861622,\n \"acc_norm\": 0.24173834204737407,\n \"acc_norm_stderr\": 0.03116492780271872,\n \"mc1\": 0.2839657282741738,\n \"mc1_stderr\": 0.015785370858396725,\n \"mc2\": NaN,\n \"mc2_stderr\": NaN\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.21928327645051193,\n \"acc_stderr\": 0.012091245787615725,\n \"acc_norm\": 0.2363481228668942,\n \"acc_norm_stderr\": 0.012414960524301825\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.25941047600079664,\n \"acc_stderr\": 0.004374153847826759,\n \"acc_norm\": 0.2582154949213304,\n \"acc_norm_stderr\": 0.004367586801776666\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.25,\n \"acc_stderr\": 0.04351941398892446,\n \"acc_norm\": 0.25,\n \"acc_norm_stderr\": 0.04351941398892446\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.2740740740740741,\n \"acc_stderr\": 0.03853254836552003,\n \"acc_norm\": 0.2740740740740741,\n \"acc_norm_stderr\": 0.03853254836552003\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.17763157894736842,\n \"acc_stderr\": 0.031103182383123398,\n \"acc_norm\": 0.17763157894736842,\n \"acc_norm_stderr\": 0.031103182383123398\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.31,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.31,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.21509433962264152,\n \"acc_stderr\": 0.02528839450289137,\n \"acc_norm\": 0.21509433962264152,\n \"acc_norm_stderr\": 0.02528839450289137\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.2569444444444444,\n \"acc_stderr\": 0.03653946969442099,\n \"acc_norm\": 0.2569444444444444,\n \"acc_norm_stderr\": 0.03653946969442099\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.2,\n \"acc_stderr\": 0.04020151261036845,\n \"acc_norm\": 0.2,\n \"acc_norm_stderr\": 0.04020151261036845\n },\n 
\"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.26,\n \"acc_stderr\": 0.0440844002276808,\n \"acc_norm\": 0.26,\n \"acc_norm_stderr\": 0.0440844002276808\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.21,\n \"acc_stderr\": 0.040936018074033256,\n \"acc_norm\": 0.21,\n \"acc_norm_stderr\": 0.040936018074033256\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.1907514450867052,\n \"acc_stderr\": 0.029957851329869337,\n \"acc_norm\": 0.1907514450867052,\n \"acc_norm_stderr\": 0.029957851329869337\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.28431372549019607,\n \"acc_stderr\": 0.04488482852329017,\n \"acc_norm\": 0.28431372549019607,\n \"acc_norm_stderr\": 0.04488482852329017\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.28,\n \"acc_stderr\": 0.045126085985421276,\n \"acc_norm\": 0.28,\n \"acc_norm_stderr\": 0.045126085985421276\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.2936170212765957,\n \"acc_stderr\": 0.02977164271249122,\n \"acc_norm\": 0.2936170212765957,\n \"acc_norm_stderr\": 0.02977164271249122\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.21929824561403508,\n \"acc_stderr\": 0.03892431106518751,\n \"acc_norm\": 0.21929824561403508,\n \"acc_norm_stderr\": 0.03892431106518751\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.2413793103448276,\n \"acc_stderr\": 0.03565998174135302,\n \"acc_norm\": 0.2413793103448276,\n \"acc_norm_stderr\": 0.03565998174135302\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.23809523809523808,\n \"acc_stderr\": 0.02193587808118476,\n \"acc_norm\": 0.23809523809523808,\n \"acc_norm_stderr\": 0.02193587808118476\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.2857142857142857,\n \"acc_stderr\": 0.04040610178208841,\n \"acc_norm\": 0.2857142857142857,\n \"acc_norm_stderr\": 0.04040610178208841\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.21,\n \"acc_stderr\": 0.04093601807403325,\n \"acc_norm\": 0.21,\n \"acc_norm_stderr\": 0.04093601807403325\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.1774193548387097,\n \"acc_stderr\": 0.02173254068932927,\n \"acc_norm\": 0.1774193548387097,\n \"acc_norm_stderr\": 0.02173254068932927\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.15270935960591134,\n \"acc_stderr\": 0.02530890453938063,\n \"acc_norm\": 0.15270935960591134,\n \"acc_norm_stderr\": 0.02530890453938063\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.25,\n \"acc_stderr\": 0.04351941398892446,\n \"acc_norm\": 0.25,\n \"acc_norm_stderr\": 0.04351941398892446\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.21818181818181817,\n \"acc_stderr\": 0.03225078108306289,\n \"acc_norm\": 0.21818181818181817,\n \"acc_norm_stderr\": 0.03225078108306289\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.17676767676767677,\n \"acc_stderr\": 0.027178752639044915,\n \"acc_norm\": 0.17676767676767677,\n \"acc_norm_stderr\": 0.027178752639044915\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.24352331606217617,\n \"acc_stderr\": 0.030975436386845426,\n \"acc_norm\": 0.24352331606217617,\n \"acc_norm_stderr\": 0.030975436386845426\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.2846153846153846,\n \"acc_stderr\": 0.0228783227997063,\n 
\"acc_norm\": 0.2846153846153846,\n \"acc_norm_stderr\": 0.0228783227997063\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.2111111111111111,\n \"acc_stderr\": 0.024882116857655075,\n \"acc_norm\": 0.2111111111111111,\n \"acc_norm_stderr\": 0.024882116857655075\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.27310924369747897,\n \"acc_stderr\": 0.028942004040998174,\n \"acc_norm\": 0.27310924369747897,\n \"acc_norm_stderr\": 0.028942004040998174\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.1986754966887417,\n \"acc_stderr\": 0.03257847384436776,\n \"acc_norm\": 0.1986754966887417,\n \"acc_norm_stderr\": 0.03257847384436776\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.1926605504587156,\n \"acc_stderr\": 0.016909276884936094,\n \"acc_norm\": 0.1926605504587156,\n \"acc_norm_stderr\": 0.016909276884936094\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.2037037037037037,\n \"acc_stderr\": 0.027467401804058014,\n \"acc_norm\": 0.2037037037037037,\n \"acc_norm_stderr\": 0.027467401804058014\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.25,\n \"acc_stderr\": 0.03039153369274154,\n \"acc_norm\": 0.25,\n \"acc_norm_stderr\": 0.03039153369274154\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.270042194092827,\n \"acc_stderr\": 0.028900721906293426,\n \"acc_norm\": 0.270042194092827,\n \"acc_norm_stderr\": 0.028900721906293426\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.31390134529147984,\n \"acc_stderr\": 0.031146796482972465,\n \"acc_norm\": 0.31390134529147984,\n \"acc_norm_stderr\": 0.031146796482972465\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.20610687022900764,\n \"acc_stderr\": 0.03547771004159463,\n \"acc_norm\": 0.20610687022900764,\n \"acc_norm_stderr\": 0.03547771004159463\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.2396694214876033,\n \"acc_stderr\": 0.03896878985070417,\n \"acc_norm\": 0.2396694214876033,\n \"acc_norm_stderr\": 0.03896878985070417\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.24074074074074073,\n \"acc_stderr\": 0.04133119440243839,\n \"acc_norm\": 0.24074074074074073,\n \"acc_norm_stderr\": 0.04133119440243839\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.22085889570552147,\n \"acc_stderr\": 0.032591773927421776,\n \"acc_norm\": 0.22085889570552147,\n \"acc_norm_stderr\": 0.032591773927421776\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.3125,\n \"acc_stderr\": 0.043994650575715215,\n \"acc_norm\": 0.3125,\n \"acc_norm_stderr\": 0.043994650575715215\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.17475728155339806,\n \"acc_stderr\": 0.037601780060266224,\n \"acc_norm\": 0.17475728155339806,\n \"acc_norm_stderr\": 0.037601780060266224\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.25213675213675213,\n \"acc_stderr\": 0.02844796547623102,\n \"acc_norm\": 0.25213675213675213,\n \"acc_norm_stderr\": 0.02844796547623102\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.33,\n \"acc_stderr\": 0.04725815626252605,\n \"acc_norm\": 0.33,\n \"acc_norm_stderr\": 0.04725815626252605\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.23754789272030652,\n \"acc_stderr\": 0.015218733046150193,\n \"acc_norm\": 0.23754789272030652,\n \"acc_norm_stderr\": 0.015218733046150193\n },\n 
\"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.21965317919075145,\n \"acc_stderr\": 0.022289638852617897,\n \"acc_norm\": 0.21965317919075145,\n \"acc_norm_stderr\": 0.022289638852617897\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.23798882681564246,\n \"acc_stderr\": 0.014242630070574915,\n \"acc_norm\": 0.23798882681564246,\n \"acc_norm_stderr\": 0.014242630070574915\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.22549019607843138,\n \"acc_stderr\": 0.023929155517351284,\n \"acc_norm\": 0.22549019607843138,\n \"acc_norm_stderr\": 0.023929155517351284\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.27009646302250806,\n \"acc_stderr\": 0.025218040373410633,\n \"acc_norm\": 0.27009646302250806,\n \"acc_norm_stderr\": 0.025218040373410633\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.2654320987654321,\n \"acc_stderr\": 0.024569223600460845,\n \"acc_norm\": 0.2654320987654321,\n \"acc_norm_stderr\": 0.024569223600460845\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.23404255319148937,\n \"acc_stderr\": 0.025257861359432417,\n \"acc_norm\": 0.23404255319148937,\n \"acc_norm_stderr\": 0.025257861359432417\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.2457627118644068,\n \"acc_stderr\": 0.010996156635142692,\n \"acc_norm\": 0.2457627118644068,\n \"acc_norm_stderr\": 0.010996156635142692\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.23897058823529413,\n \"acc_stderr\": 0.02590528064489301,\n \"acc_norm\": 0.23897058823529413,\n \"acc_norm_stderr\": 0.02590528064489301\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.25,\n \"acc_stderr\": 0.01751781884501444,\n \"acc_norm\": 0.25,\n \"acc_norm_stderr\": 0.01751781884501444\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.2727272727272727,\n \"acc_stderr\": 0.04265792110940588,\n \"acc_norm\": 0.2727272727272727,\n \"acc_norm_stderr\": 0.04265792110940588\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.18775510204081633,\n \"acc_stderr\": 0.02500025603954621,\n \"acc_norm\": 0.18775510204081633,\n \"acc_norm_stderr\": 0.02500025603954621\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.263681592039801,\n \"acc_stderr\": 0.03115715086935559,\n \"acc_norm\": 0.263681592039801,\n \"acc_norm_stderr\": 0.03115715086935559\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.23,\n \"acc_stderr\": 0.04229525846816508,\n \"acc_norm\": 0.23,\n \"acc_norm_stderr\": 0.04229525846816508\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.27710843373493976,\n \"acc_stderr\": 0.034843315926805875,\n \"acc_norm\": 0.27710843373493976,\n \"acc_norm_stderr\": 0.034843315926805875\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.3216374269005848,\n \"acc_stderr\": 0.03582529442573122,\n \"acc_norm\": 0.3216374269005848,\n \"acc_norm_stderr\": 0.03582529442573122\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.2839657282741738,\n \"mc1_stderr\": 0.015785370858396725,\n \"mc2\": NaN,\n \"mc2_stderr\": NaN\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.494869771112865,\n \"acc_stderr\": 0.014051745961790516\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.0,\n \"acc_stderr\": 0.0\n }\n}\n```", "repo_url": "https://huggingface.co/mindy-labs/mindy-7b", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": 
[{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_12_16T16_55_36.192402", "path": ["**/details_harness|arc:challenge|25_2023-12-16T16-55-36.192402.parquet"]}, {"split": "2023_12_24T18_45_34.219710", "path": ["**/details_harness|arc:challenge|25_2023-12-24T18-45-34.219710.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-12-24T18-45-34.219710.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_12_16T16_55_36.192402", "path": ["**/details_harness|gsm8k|5_2023-12-16T16-55-36.192402.parquet"]}, {"split": "2023_12_24T18_45_34.219710", "path": ["**/details_harness|gsm8k|5_2023-12-24T18-45-34.219710.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-12-24T18-45-34.219710.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_12_16T16_55_36.192402", "path": ["**/details_harness|hellaswag|10_2023-12-16T16-55-36.192402.parquet"]}, {"split": "2023_12_24T18_45_34.219710", "path": ["**/details_harness|hellaswag|10_2023-12-24T18-45-34.219710.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-12-24T18-45-34.219710.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_12_16T16_55_36.192402", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T16-55-36.192402.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-12-16T16-55-36.192402.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-12-16T16-55-36.192402.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T16-55-36.192402.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T16-55-36.192402.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-12-16T16-55-36.192402.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T16-55-36.192402.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T16-55-36.192402.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T16-55-36.192402.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T16-55-36.192402.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-12-16T16-55-36.192402.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-12-16T16-55-36.192402.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T16-55-36.192402.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-12-16T16-55-36.192402.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T16-55-36.192402.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T16-55-36.192402.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T16-55-36.192402.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-12-16T16-55-36.192402.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T16-55-36.192402.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T16-55-36.192402.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T16-55-36.192402.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T16-55-36.192402.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T16-55-36.192402.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T16-55-36.192402.parquet", 
"**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T16-55-36.192402.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T16-55-36.192402.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T16-55-36.192402.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T16-55-36.192402.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T16-55-36.192402.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T16-55-36.192402.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T16-55-36.192402.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T16-55-36.192402.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-12-16T16-55-36.192402.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T16-55-36.192402.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-12-16T16-55-36.192402.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T16-55-36.192402.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T16-55-36.192402.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T16-55-36.192402.parquet", "**/details_harness|hendrycksTest-management|5_2023-12-16T16-55-36.192402.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-12-16T16-55-36.192402.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T16-55-36.192402.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T16-55-36.192402.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T16-55-36.192402.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T16-55-36.192402.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-12-16T16-55-36.192402.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-12-16T16-55-36.192402.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-12-16T16-55-36.192402.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T16-55-36.192402.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-12-16T16-55-36.192402.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T16-55-36.192402.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T16-55-36.192402.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-12-16T16-55-36.192402.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-12-16T16-55-36.192402.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-12-16T16-55-36.192402.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T16-55-36.192402.parquet", "**/details_harness|hendrycksTest-virology|5_2023-12-16T16-55-36.192402.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-12-16T16-55-36.192402.parquet"]}, {"split": "2023_12_24T18_45_34.219710", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-24T18-45-34.219710.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-12-24T18-45-34.219710.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-12-24T18-45-34.219710.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-12-24T18-45-34.219710.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-24T18-45-34.219710.parquet", 
"**/details_harness|hendrycksTest-college_biology|5_2023-12-24T18-45-34.219710.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-12-24T18-45-34.219710.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-12-24T18-45-34.219710.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-12-24T18-45-34.219710.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-12-24T18-45-34.219710.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-12-24T18-45-34.219710.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-12-24T18-45-34.219710.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-24T18-45-34.219710.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-12-24T18-45-34.219710.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-24T18-45-34.219710.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-24T18-45-34.219710.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-12-24T18-45-34.219710.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-12-24T18-45-34.219710.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-12-24T18-45-34.219710.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-24T18-45-34.219710.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-24T18-45-34.219710.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-24T18-45-34.219710.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-12-24T18-45-34.219710.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-24T18-45-34.219710.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-24T18-45-34.219710.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-24T18-45-34.219710.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-24T18-45-34.219710.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-12-24T18-45-34.219710.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-24T18-45-34.219710.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-24T18-45-34.219710.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-24T18-45-34.219710.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-24T18-45-34.219710.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-12-24T18-45-34.219710.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-12-24T18-45-34.219710.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-12-24T18-45-34.219710.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-12-24T18-45-34.219710.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-24T18-45-34.219710.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-12-24T18-45-34.219710.parquet", "**/details_harness|hendrycksTest-management|5_2023-12-24T18-45-34.219710.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-12-24T18-45-34.219710.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-12-24T18-45-34.219710.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-12-24T18-45-34.219710.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-12-24T18-45-34.219710.parquet", 
"**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-24T18-45-34.219710.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-12-24T18-45-34.219710.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-12-24T18-45-34.219710.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-12-24T18-45-34.219710.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-12-24T18-45-34.219710.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-12-24T18-45-34.219710.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-12-24T18-45-34.219710.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-12-24T18-45-34.219710.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-12-24T18-45-34.219710.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-12-24T18-45-34.219710.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-12-24T18-45-34.219710.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-24T18-45-34.219710.parquet", "**/details_harness|hendrycksTest-virology|5_2023-12-24T18-45-34.219710.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-12-24T18-45-34.219710.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-24T18-45-34.219710.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-12-24T18-45-34.219710.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-12-24T18-45-34.219710.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-12-24T18-45-34.219710.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-24T18-45-34.219710.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-12-24T18-45-34.219710.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-12-24T18-45-34.219710.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-12-24T18-45-34.219710.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-12-24T18-45-34.219710.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-12-24T18-45-34.219710.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-12-24T18-45-34.219710.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-12-24T18-45-34.219710.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-24T18-45-34.219710.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-12-24T18-45-34.219710.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-24T18-45-34.219710.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-24T18-45-34.219710.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-12-24T18-45-34.219710.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-12-24T18-45-34.219710.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-12-24T18-45-34.219710.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-24T18-45-34.219710.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-24T18-45-34.219710.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-24T18-45-34.219710.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-12-24T18-45-34.219710.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-24T18-45-34.219710.parquet", 
"**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-24T18-45-34.219710.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-24T18-45-34.219710.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-24T18-45-34.219710.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-12-24T18-45-34.219710.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-24T18-45-34.219710.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-24T18-45-34.219710.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-24T18-45-34.219710.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-24T18-45-34.219710.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-12-24T18-45-34.219710.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-12-24T18-45-34.219710.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-12-24T18-45-34.219710.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-12-24T18-45-34.219710.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-24T18-45-34.219710.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-12-24T18-45-34.219710.parquet", "**/details_harness|hendrycksTest-management|5_2023-12-24T18-45-34.219710.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-12-24T18-45-34.219710.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-12-24T18-45-34.219710.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-12-24T18-45-34.219710.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-12-24T18-45-34.219710.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-24T18-45-34.219710.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-12-24T18-45-34.219710.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-12-24T18-45-34.219710.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-12-24T18-45-34.219710.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-12-24T18-45-34.219710.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-12-24T18-45-34.219710.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-12-24T18-45-34.219710.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-12-24T18-45-34.219710.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-12-24T18-45-34.219710.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-12-24T18-45-34.219710.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-12-24T18-45-34.219710.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-24T18-45-34.219710.parquet", "**/details_harness|hendrycksTest-virology|5_2023-12-24T18-45-34.219710.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-12-24T18-45-34.219710.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_12_16T16_55_36.192402", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T16-55-36.192402.parquet"]}, {"split": "2023_12_24T18_45_34.219710", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-24T18-45-34.219710.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-24T18-45-34.219710.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": 
"2023_12_16T16_55_36.192402", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-12-16T16-55-36.192402.parquet"]}, {"split": "2023_12_24T18_45_34.219710", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-12-24T18-45-34.219710.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-12-24T18-45-34.219710.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_12_16T16_55_36.192402", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-12-16T16-55-36.192402.parquet"]}, {"split": "2023_12_24T18_45_34.219710", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-12-24T18-45-34.219710.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-12-24T18-45-34.219710.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_12_16T16_55_36.192402", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T16-55-36.192402.parquet"]}, {"split": "2023_12_24T18_45_34.219710", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-12-24T18-45-34.219710.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-12-24T18-45-34.219710.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_12_16T16_55_36.192402", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T16-55-36.192402.parquet"]}, {"split": "2023_12_24T18_45_34.219710", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-24T18-45-34.219710.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-24T18-45-34.219710.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_12_16T16_55_36.192402", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-12-16T16-55-36.192402.parquet"]}, {"split": "2023_12_24T18_45_34.219710", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-12-24T18-45-34.219710.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-12-24T18-45-34.219710.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_12_16T16_55_36.192402", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T16-55-36.192402.parquet"]}, {"split": "2023_12_24T18_45_34.219710", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-12-24T18-45-34.219710.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-12-24T18-45-34.219710.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_12_16T16_55_36.192402", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T16-55-36.192402.parquet"]}, {"split": "2023_12_24T18_45_34.219710", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-12-24T18-45-34.219710.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-12-24T18-45-34.219710.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_12_16T16_55_36.192402", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T16-55-36.192402.parquet"]}, {"split": "2023_12_24T18_45_34.219710", "path": 
["**/details_harness|hendrycksTest-college_mathematics|5_2023-12-24T18-45-34.219710.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-12-24T18-45-34.219710.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_12_16T16_55_36.192402", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T16-55-36.192402.parquet"]}, {"split": "2023_12_24T18_45_34.219710", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-12-24T18-45-34.219710.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-12-24T18-45-34.219710.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_12_16T16_55_36.192402", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-12-16T16-55-36.192402.parquet"]}, {"split": "2023_12_24T18_45_34.219710", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-12-24T18-45-34.219710.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-12-24T18-45-34.219710.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_12_16T16_55_36.192402", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-12-16T16-55-36.192402.parquet"]}, {"split": "2023_12_24T18_45_34.219710", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-12-24T18-45-34.219710.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-12-24T18-45-34.219710.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_12_16T16_55_36.192402", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T16-55-36.192402.parquet"]}, {"split": "2023_12_24T18_45_34.219710", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-24T18-45-34.219710.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-24T18-45-34.219710.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_12_16T16_55_36.192402", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-12-16T16-55-36.192402.parquet"]}, {"split": "2023_12_24T18_45_34.219710", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-12-24T18-45-34.219710.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-12-24T18-45-34.219710.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_12_16T16_55_36.192402", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T16-55-36.192402.parquet"]}, {"split": "2023_12_24T18_45_34.219710", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-24T18-45-34.219710.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-24T18-45-34.219710.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_12_16T16_55_36.192402", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T16-55-36.192402.parquet"]}, {"split": "2023_12_24T18_45_34.219710", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-24T18-45-34.219710.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-24T18-45-34.219710.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_12_16T16_55_36.192402", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T16-55-36.192402.parquet"]}, {"split": "2023_12_24T18_45_34.219710", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-12-24T18-45-34.219710.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-12-24T18-45-34.219710.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_12_16T16_55_36.192402", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-12-16T16-55-36.192402.parquet"]}, {"split": "2023_12_24T18_45_34.219710", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-12-24T18-45-34.219710.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-12-24T18-45-34.219710.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_12_16T16_55_36.192402", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T16-55-36.192402.parquet"]}, {"split": "2023_12_24T18_45_34.219710", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-12-24T18-45-34.219710.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-12-24T18-45-34.219710.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_12_16T16_55_36.192402", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T16-55-36.192402.parquet"]}, {"split": "2023_12_24T18_45_34.219710", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-24T18-45-34.219710.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-24T18-45-34.219710.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_12_16T16_55_36.192402", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T16-55-36.192402.parquet"]}, {"split": "2023_12_24T18_45_34.219710", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-24T18-45-34.219710.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-24T18-45-34.219710.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_12_16T16_55_36.192402", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T16-55-36.192402.parquet"]}, {"split": "2023_12_24T18_45_34.219710", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-24T18-45-34.219710.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-24T18-45-34.219710.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_12_16T16_55_36.192402", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T16-55-36.192402.parquet"]}, {"split": "2023_12_24T18_45_34.219710", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-12-24T18-45-34.219710.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_geography|5_2023-12-24T18-45-34.219710.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_12_16T16_55_36.192402", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T16-55-36.192402.parquet"]}, {"split": "2023_12_24T18_45_34.219710", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-24T18-45-34.219710.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-24T18-45-34.219710.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_12_16T16_55_36.192402", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T16-55-36.192402.parquet"]}, {"split": "2023_12_24T18_45_34.219710", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-24T18-45-34.219710.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-24T18-45-34.219710.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_12_16T16_55_36.192402", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T16-55-36.192402.parquet"]}, {"split": "2023_12_24T18_45_34.219710", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-24T18-45-34.219710.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-24T18-45-34.219710.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_12_16T16_55_36.192402", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T16-55-36.192402.parquet"]}, {"split": "2023_12_24T18_45_34.219710", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-24T18-45-34.219710.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-24T18-45-34.219710.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_12_16T16_55_36.192402", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T16-55-36.192402.parquet"]}, {"split": "2023_12_24T18_45_34.219710", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-12-24T18-45-34.219710.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-12-24T18-45-34.219710.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_12_16T16_55_36.192402", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T16-55-36.192402.parquet"]}, {"split": "2023_12_24T18_45_34.219710", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-24T18-45-34.219710.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-24T18-45-34.219710.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_12_16T16_55_36.192402", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T16-55-36.192402.parquet"]}, {"split": "2023_12_24T18_45_34.219710", "path": 
["**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-24T18-45-34.219710.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-24T18-45-34.219710.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_12_16T16_55_36.192402", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T16-55-36.192402.parquet"]}, {"split": "2023_12_24T18_45_34.219710", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-24T18-45-34.219710.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-24T18-45-34.219710.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_12_16T16_55_36.192402", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T16-55-36.192402.parquet"]}, {"split": "2023_12_24T18_45_34.219710", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-24T18-45-34.219710.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-24T18-45-34.219710.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_12_16T16_55_36.192402", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-12-16T16-55-36.192402.parquet"]}, {"split": "2023_12_24T18_45_34.219710", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-12-24T18-45-34.219710.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-12-24T18-45-34.219710.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_12_16T16_55_36.192402", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T16-55-36.192402.parquet"]}, {"split": "2023_12_24T18_45_34.219710", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-12-24T18-45-34.219710.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-12-24T18-45-34.219710.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_12_16T16_55_36.192402", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-12-16T16-55-36.192402.parquet"]}, {"split": "2023_12_24T18_45_34.219710", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-12-24T18-45-34.219710.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-12-24T18-45-34.219710.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_12_16T16_55_36.192402", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T16-55-36.192402.parquet"]}, {"split": "2023_12_24T18_45_34.219710", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-12-24T18-45-34.219710.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-12-24T18-45-34.219710.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_12_16T16_55_36.192402", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T16-55-36.192402.parquet"]}, {"split": "2023_12_24T18_45_34.219710", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-24T18-45-34.219710.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-24T18-45-34.219710.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_12_16T16_55_36.192402", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T16-55-36.192402.parquet"]}, {"split": "2023_12_24T18_45_34.219710", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-12-24T18-45-34.219710.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-12-24T18-45-34.219710.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_12_16T16_55_36.192402", "path": ["**/details_harness|hendrycksTest-management|5_2023-12-16T16-55-36.192402.parquet"]}, {"split": "2023_12_24T18_45_34.219710", "path": ["**/details_harness|hendrycksTest-management|5_2023-12-24T18-45-34.219710.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-12-24T18-45-34.219710.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_12_16T16_55_36.192402", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-12-16T16-55-36.192402.parquet"]}, {"split": "2023_12_24T18_45_34.219710", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-12-24T18-45-34.219710.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-12-24T18-45-34.219710.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_12_16T16_55_36.192402", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T16-55-36.192402.parquet"]}, {"split": "2023_12_24T18_45_34.219710", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-12-24T18-45-34.219710.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-12-24T18-45-34.219710.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_12_16T16_55_36.192402", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T16-55-36.192402.parquet"]}, {"split": "2023_12_24T18_45_34.219710", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-12-24T18-45-34.219710.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-12-24T18-45-34.219710.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_12_16T16_55_36.192402", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T16-55-36.192402.parquet"]}, {"split": "2023_12_24T18_45_34.219710", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-12-24T18-45-34.219710.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-12-24T18-45-34.219710.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_12_16T16_55_36.192402", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T16-55-36.192402.parquet"]}, {"split": "2023_12_24T18_45_34.219710", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-24T18-45-34.219710.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-24T18-45-34.219710.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_12_16T16_55_36.192402", "path": 
["**/details_harness|hendrycksTest-nutrition|5_2023-12-16T16-55-36.192402.parquet"]}, {"split": "2023_12_24T18_45_34.219710", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-12-24T18-45-34.219710.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-12-24T18-45-34.219710.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_12_16T16_55_36.192402", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-12-16T16-55-36.192402.parquet"]}, {"split": "2023_12_24T18_45_34.219710", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-12-24T18-45-34.219710.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-12-24T18-45-34.219710.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_12_16T16_55_36.192402", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-12-16T16-55-36.192402.parquet"]}, {"split": "2023_12_24T18_45_34.219710", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-12-24T18-45-34.219710.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-12-24T18-45-34.219710.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_12_16T16_55_36.192402", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T16-55-36.192402.parquet"]}, {"split": "2023_12_24T18_45_34.219710", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-12-24T18-45-34.219710.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-12-24T18-45-34.219710.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_12_16T16_55_36.192402", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-12-16T16-55-36.192402.parquet"]}, {"split": "2023_12_24T18_45_34.219710", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-12-24T18-45-34.219710.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-12-24T18-45-34.219710.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_12_16T16_55_36.192402", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T16-55-36.192402.parquet"]}, {"split": "2023_12_24T18_45_34.219710", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-12-24T18-45-34.219710.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-12-24T18-45-34.219710.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_12_16T16_55_36.192402", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T16-55-36.192402.parquet"]}, {"split": "2023_12_24T18_45_34.219710", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-12-24T18-45-34.219710.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-12-24T18-45-34.219710.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_12_16T16_55_36.192402", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-12-16T16-55-36.192402.parquet"]}, {"split": "2023_12_24T18_45_34.219710", "path": 
["**/details_harness|hendrycksTest-public_relations|5_2023-12-24T18-45-34.219710.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-12-24T18-45-34.219710.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_12_16T16_55_36.192402", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-12-16T16-55-36.192402.parquet"]}, {"split": "2023_12_24T18_45_34.219710", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-12-24T18-45-34.219710.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-12-24T18-45-34.219710.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_12_16T16_55_36.192402", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-12-16T16-55-36.192402.parquet"]}, {"split": "2023_12_24T18_45_34.219710", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-12-24T18-45-34.219710.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-12-24T18-45-34.219710.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_12_16T16_55_36.192402", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T16-55-36.192402.parquet"]}, {"split": "2023_12_24T18_45_34.219710", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-24T18-45-34.219710.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-24T18-45-34.219710.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_12_16T16_55_36.192402", "path": ["**/details_harness|hendrycksTest-virology|5_2023-12-16T16-55-36.192402.parquet"]}, {"split": "2023_12_24T18_45_34.219710", "path": ["**/details_harness|hendrycksTest-virology|5_2023-12-24T18-45-34.219710.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-12-24T18-45-34.219710.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_12_16T16_55_36.192402", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-12-16T16-55-36.192402.parquet"]}, {"split": "2023_12_24T18_45_34.219710", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-12-24T18-45-34.219710.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-12-24T18-45-34.219710.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_12_16T16_55_36.192402", "path": ["**/details_harness|truthfulqa:mc|0_2023-12-16T16-55-36.192402.parquet"]}, {"split": "2023_12_24T18_45_34.219710", "path": ["**/details_harness|truthfulqa:mc|0_2023-12-24T18-45-34.219710.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-12-24T18-45-34.219710.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_12_16T16_55_36.192402", "path": ["**/details_harness|winogrande|5_2023-12-16T16-55-36.192402.parquet"]}, {"split": "2023_12_24T18_45_34.219710", "path": ["**/details_harness|winogrande|5_2023-12-24T18-45-34.219710.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-12-24T18-45-34.219710.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_12_16T16_55_36.192402", "path": ["results_2023-12-16T16-55-36.192402.parquet"]}, {"split": "2023_12_24T18_45_34.219710", "path": 
["results_2023-12-24T18-45-34.219710.parquet"]}, {"split": "latest", "path": ["results_2023-12-24T18-45-34.219710.parquet"]}]}]}
2023-12-24T18:48:27+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of mindy-labs/mindy-7b Dataset automatically created during the evaluation run of model mindy-labs/mindy-7b on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following (see the snippet reproduced after this card text): ## Latest results These are the latest results from run 2023-12-24T18:45:34.219710 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
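The loading snippet referenced above, reproduced from the card metadata for this run (it assumes the Hugging Face `datasets` library is installed; any other config name from the "configs" list works the same way):

```python
from datasets import load_dataset

# Load the details of one evaluated task (here the 5-shot Winogrande run).
# The "latest" results are exposed through the "train" split of each config.
data = load_dataset("open-llm-leaderboard/details_mindy-labs__mindy-7b",
	"harness_winogrande_5",
	split="train")
```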
[ "# Dataset Card for Evaluation run of mindy-labs/mindy-7b\n\n\n\nDataset automatically created during the evaluation run of model mindy-labs/mindy-7b on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-12-24T18:45:34.219710(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of mindy-labs/mindy-7b\n\n\n\nDataset automatically created during the evaluation run of model mindy-labs/mindy-7b on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-12-24T18:45:34.219710(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 181, 67, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of mindy-labs/mindy-7b\n\n\n\nDataset automatically created during the evaluation run of model mindy-labs/mindy-7b on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-12-24T18:45:34.219710(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]## Dataset Card Authors [optional]## Dataset Card Contact" ]
05fd422e8cecafa63b9b6ab91bbd5d3a1a62b2e7
# Dataset Card for Evaluation run of martyn/mistral-megamerge-dare-7b <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [martyn/mistral-megamerge-dare-7b](https://huggingface.co/martyn/mistral-megamerge-dare-7b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_martyn__mistral-megamerge-dare-7b", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-12-16T16:59:07.341646](https://huggingface.co/datasets/open-llm-leaderboard/details_martyn__mistral-megamerge-dare-7b/blob/main/results_2023-12-16T16-59-07.341646.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.43110066802732633, "acc_stderr": 0.034328754146029136, "acc_norm": 0.43723096690924246, "acc_norm_stderr": 0.035144012683790145, "mc1": 0.35862913096695226, "mc1_stderr": 0.016789289499502022, "mc2": 0.5108336746233818, "mc2_stderr": 0.015741003892075174 }, "harness|arc:challenge|25": { "acc": 0.5136518771331058, "acc_stderr": 0.014605943429860945, "acc_norm": 0.552901023890785, "acc_norm_stderr": 0.014529380160526854 }, "harness|hellaswag|10": { "acc": 0.5077673770165305, "acc_stderr": 0.004989179286677388, "acc_norm": 0.7048396733718383, "acc_norm_stderr": 0.0045518262729780596 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.21, "acc_stderr": 0.040936018074033256, "acc_norm": 0.21, "acc_norm_stderr": 0.040936018074033256 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.4740740740740741, "acc_stderr": 0.04313531696750574, "acc_norm": 0.4740740740740741, "acc_norm_stderr": 0.04313531696750574 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.4934210526315789, "acc_stderr": 0.04068590050224971, "acc_norm": 0.4934210526315789, "acc_norm_stderr": 0.04068590050224971 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.4, "acc_stderr": 0.049236596391733084, "acc_norm": 0.4, "acc_norm_stderr": 0.049236596391733084 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.5471698113207547, "acc_stderr": 0.030635627957961816, "acc_norm": 0.5471698113207547, "acc_norm_stderr": 0.030635627957961816 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.4583333333333333, "acc_stderr": 0.04166666666666666, "acc_norm": 0.4583333333333333, "acc_norm_stderr": 0.04166666666666666 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.37, "acc_stderr": 0.04852365870939099, "acc_norm": 0.37, "acc_norm_stderr": 0.04852365870939099 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.41, "acc_stderr": 0.049431107042371025, "acc_norm": 0.41, "acc_norm_stderr": 0.049431107042371025 },
"harness|hendrycksTest-college_mathematics|5": { "acc": 0.23, "acc_stderr": 0.04229525846816505, "acc_norm": 0.23, "acc_norm_stderr": 0.04229525846816505 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.4624277456647399, "acc_stderr": 0.0380168510452446, "acc_norm": 0.4624277456647399, "acc_norm_stderr": 0.0380168510452446 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.3235294117647059, "acc_stderr": 0.046550104113196177, "acc_norm": 0.3235294117647059, "acc_norm_stderr": 0.046550104113196177 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.6, "acc_stderr": 0.04923659639173309, "acc_norm": 0.6, "acc_norm_stderr": 0.04923659639173309 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.3702127659574468, "acc_stderr": 0.03156564682236784, "acc_norm": 0.3702127659574468, "acc_norm_stderr": 0.03156564682236784 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.2807017543859649, "acc_stderr": 0.042270544512321984, "acc_norm": 0.2807017543859649, "acc_norm_stderr": 0.042270544512321984 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.38620689655172413, "acc_stderr": 0.04057324734419036, "acc_norm": 0.38620689655172413, "acc_norm_stderr": 0.04057324734419036 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.29365079365079366, "acc_stderr": 0.023456037383982026, "acc_norm": 0.29365079365079366, "acc_norm_stderr": 0.023456037383982026 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.24603174603174602, "acc_stderr": 0.03852273364924315, "acc_norm": 0.24603174603174602, "acc_norm_stderr": 0.03852273364924315 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.28, "acc_stderr": 0.04512608598542127, "acc_norm": 0.28, "acc_norm_stderr": 0.04512608598542127 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.3903225806451613, "acc_stderr": 0.027751256636969576, "acc_norm": 0.3903225806451613, "acc_norm_stderr": 0.027751256636969576 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.32019704433497537, "acc_stderr": 0.032826493853041504, "acc_norm": 0.32019704433497537, "acc_norm_stderr": 0.032826493853041504 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.42, "acc_stderr": 0.049604496374885836, "acc_norm": 0.42, "acc_norm_stderr": 0.049604496374885836 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.2606060606060606, "acc_stderr": 0.03427743175816524, "acc_norm": 0.2606060606060606, "acc_norm_stderr": 0.03427743175816524 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.5555555555555556, "acc_stderr": 0.035402943770953675, "acc_norm": 0.5555555555555556, "acc_norm_stderr": 0.035402943770953675 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.6528497409326425, "acc_stderr": 0.03435696168361355, "acc_norm": 0.6528497409326425, "acc_norm_stderr": 0.03435696168361355 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.40512820512820513, "acc_stderr": 0.024890471769938145, "acc_norm": 0.40512820512820513, "acc_norm_stderr": 0.024890471769938145 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.2851851851851852, "acc_stderr": 0.027528599210340492, "acc_norm": 0.2851851851851852, "acc_norm_stderr": 0.027528599210340492 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.4369747899159664, "acc_stderr": 0.03221943636566197, "acc_norm": 0.4369747899159664, "acc_norm_stderr": 0.03221943636566197 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.2913907284768212, "acc_stderr": 
0.03710185726119995, "acc_norm": 0.2913907284768212, "acc_norm_stderr": 0.03710185726119995 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.5926605504587156, "acc_stderr": 0.021065986244412895, "acc_norm": 0.5926605504587156, "acc_norm_stderr": 0.021065986244412895 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.42592592592592593, "acc_stderr": 0.03372343271653063, "acc_norm": 0.42592592592592593, "acc_norm_stderr": 0.03372343271653063 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.37745098039215685, "acc_stderr": 0.03402272044340705, "acc_norm": 0.37745098039215685, "acc_norm_stderr": 0.03402272044340705 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.5189873417721519, "acc_stderr": 0.03252375148090448, "acc_norm": 0.5189873417721519, "acc_norm_stderr": 0.03252375148090448 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.5112107623318386, "acc_stderr": 0.033549366530984746, "acc_norm": 0.5112107623318386, "acc_norm_stderr": 0.033549366530984746 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.45038167938931295, "acc_stderr": 0.04363643698524779, "acc_norm": 0.45038167938931295, "acc_norm_stderr": 0.04363643698524779 }, "harness|hendrycksTest-international_law|5": { "acc": 0.6033057851239669, "acc_stderr": 0.04465869780531009, "acc_norm": 0.6033057851239669, "acc_norm_stderr": 0.04465869780531009 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.5277777777777778, "acc_stderr": 0.048262172941398944, "acc_norm": 0.5277777777777778, "acc_norm_stderr": 0.048262172941398944 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.4539877300613497, "acc_stderr": 0.0391170190467718, "acc_norm": 0.4539877300613497, "acc_norm_stderr": 0.0391170190467718 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.41964285714285715, "acc_stderr": 0.04684099321077106, "acc_norm": 0.41964285714285715, "acc_norm_stderr": 0.04684099321077106 }, "harness|hendrycksTest-management|5": { "acc": 0.6116504854368932, "acc_stderr": 0.04825729337356389, "acc_norm": 0.6116504854368932, "acc_norm_stderr": 0.04825729337356389 }, "harness|hendrycksTest-marketing|5": { "acc": 0.6666666666666666, "acc_stderr": 0.030882736974138656, "acc_norm": 0.6666666666666666, "acc_norm_stderr": 0.030882736974138656 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.43, "acc_stderr": 0.04975698519562427, "acc_norm": 0.43, "acc_norm_stderr": 0.04975698519562427 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.6015325670498084, "acc_stderr": 0.017507438602777408, "acc_norm": 0.6015325670498084, "acc_norm_stderr": 0.017507438602777408 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.407514450867052, "acc_stderr": 0.0264545781469315, "acc_norm": 0.407514450867052, "acc_norm_stderr": 0.0264545781469315 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.3229050279329609, "acc_stderr": 0.01563844038024149, "acc_norm": 0.3229050279329609, "acc_norm_stderr": 0.01563844038024149 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.46405228758169936, "acc_stderr": 0.028555827516528777, "acc_norm": 0.46405228758169936, "acc_norm_stderr": 0.028555827516528777 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.4694533762057878, "acc_stderr": 0.02834504586484068, "acc_norm": 0.4694533762057878, "acc_norm_stderr": 0.02834504586484068 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.48148148148148145, "acc_stderr": 0.027801656212323674, "acc_norm": 0.48148148148148145, "acc_norm_stderr": 0.027801656212323674 }, 
"harness|hendrycksTest-professional_accounting|5": { "acc": 0.3049645390070922, "acc_stderr": 0.027464708442022128, "acc_norm": 0.3049645390070922, "acc_norm_stderr": 0.027464708442022128 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.2685788787483703, "acc_stderr": 0.011320056629121741, "acc_norm": 0.2685788787483703, "acc_norm_stderr": 0.011320056629121741 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.39705882352941174, "acc_stderr": 0.029722152099280072, "acc_norm": 0.39705882352941174, "acc_norm_stderr": 0.029722152099280072 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.40032679738562094, "acc_stderr": 0.01982184368827177, "acc_norm": 0.40032679738562094, "acc_norm_stderr": 0.01982184368827177 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.5727272727272728, "acc_stderr": 0.04738198703545483, "acc_norm": 0.5727272727272728, "acc_norm_stderr": 0.04738198703545483 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.47346938775510206, "acc_stderr": 0.03196412734523272, "acc_norm": 0.47346938775510206, "acc_norm_stderr": 0.03196412734523272 }, "harness|hendrycksTest-sociology|5": { "acc": 0.39303482587064675, "acc_stderr": 0.0345368246603156, "acc_norm": 0.39303482587064675, "acc_norm_stderr": 0.0345368246603156 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.61, "acc_stderr": 0.04902071300001974, "acc_norm": 0.61, "acc_norm_stderr": 0.04902071300001974 }, "harness|hendrycksTest-virology|5": { "acc": 0.3433734939759036, "acc_stderr": 0.03696584317010601, "acc_norm": 0.3433734939759036, "acc_norm_stderr": 0.03696584317010601 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.5847953216374269, "acc_stderr": 0.037792759455032014, "acc_norm": 0.5847953216374269, "acc_norm_stderr": 0.037792759455032014 }, "harness|truthfulqa:mc|0": { "mc1": 0.35862913096695226, "mc1_stderr": 0.016789289499502022, "mc2": 0.5108336746233818, "mc2_stderr": 0.015741003892075174 }, "harness|winogrande|5": { "acc": 0.6708760852407262, "acc_stderr": 0.013206387089091458 }, "harness|gsm8k|5": { "acc": 0.06595905989385899, "acc_stderr": 0.006836951192034193 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. 
news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
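The card above also describes a "results" configuration holding the aggregated metrics, with splits named after run timestamps plus a "latest" alias. A minimal sketch of loading it, assuming the "results" configuration and the "latest" split exist for this dataset as they do for the other evaluation runs listed in this dump:

```python
# Hedged sketch: assumes the "results" configuration and "latest" split described in the
# card are present for open-llm-leaderboard/details_martyn__mistral-megamerge-dare-7b.
from datasets import load_dataset

aggregated = load_dataset(
    "open-llm-leaderboard/details_martyn__mistral-megamerge-dare-7b",
    "results",
    split="latest",  # alias for the most recent results file
)
```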
open-llm-leaderboard/details_martyn__mistral-megamerge-dare-7b
[ "region:us" ]
2023-12-16T17:01:57+00:00
{"pretty_name": "Evaluation run of martyn/mistral-megamerge-dare-7b", "dataset_summary": "Dataset automatically created during the evaluation run of model [martyn/mistral-megamerge-dare-7b](https://huggingface.co/martyn/mistral-megamerge-dare-7b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_martyn__mistral-megamerge-dare-7b\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-12-16T16:59:07.341646](https://huggingface.co/datasets/open-llm-leaderboard/details_martyn__mistral-megamerge-dare-7b/blob/main/results_2023-12-16T16-59-07.341646.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.43110066802732633,\n \"acc_stderr\": 0.034328754146029136,\n \"acc_norm\": 0.43723096690924246,\n \"acc_norm_stderr\": 0.035144012683790145,\n \"mc1\": 0.35862913096695226,\n \"mc1_stderr\": 0.016789289499502022,\n \"mc2\": 0.5108336746233818,\n \"mc2_stderr\": 0.015741003892075174\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.5136518771331058,\n \"acc_stderr\": 0.014605943429860945,\n \"acc_norm\": 0.552901023890785,\n \"acc_norm_stderr\": 0.014529380160526854\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.5077673770165305,\n \"acc_stderr\": 0.004989179286677388,\n \"acc_norm\": 0.7048396733718383,\n \"acc_norm_stderr\": 0.0045518262729780596\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.21,\n \"acc_stderr\": 0.040936018074033256,\n \"acc_norm\": 0.21,\n \"acc_norm_stderr\": 0.040936018074033256\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.4740740740740741,\n \"acc_stderr\": 0.04313531696750574,\n \"acc_norm\": 0.4740740740740741,\n \"acc_norm_stderr\": 0.04313531696750574\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.4934210526315789,\n \"acc_stderr\": 0.04068590050224971,\n \"acc_norm\": 0.4934210526315789,\n \"acc_norm_stderr\": 0.04068590050224971\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.4,\n \"acc_stderr\": 0.049236596391733084,\n \"acc_norm\": 0.4,\n \"acc_norm_stderr\": 0.049236596391733084\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.5471698113207547,\n \"acc_stderr\": 0.030635627957961816,\n \"acc_norm\": 0.5471698113207547,\n \"acc_norm_stderr\": 0.030635627957961816\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.4583333333333333,\n \"acc_stderr\": 0.04166666666666666,\n \"acc_norm\": 0.4583333333333333,\n \"acc_norm_stderr\": 0.04166666666666666\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.37,\n \"acc_stderr\": 
0.04852365870939099,\n \"acc_norm\": 0.37,\n \"acc_norm_stderr\": 0.04852365870939099\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.41,\n \"acc_stderr\": 0.049431107042371025,\n \"acc_norm\": 0.41,\n \"acc_norm_stderr\": 0.049431107042371025\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.23,\n \"acc_stderr\": 0.04229525846816505,\n \"acc_norm\": 0.23,\n \"acc_norm_stderr\": 0.04229525846816505\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.4624277456647399,\n \"acc_stderr\": 0.0380168510452446,\n \"acc_norm\": 0.4624277456647399,\n \"acc_norm_stderr\": 0.0380168510452446\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.3235294117647059,\n \"acc_stderr\": 0.046550104113196177,\n \"acc_norm\": 0.3235294117647059,\n \"acc_norm_stderr\": 0.046550104113196177\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.6,\n \"acc_stderr\": 0.04923659639173309,\n \"acc_norm\": 0.6,\n \"acc_norm_stderr\": 0.04923659639173309\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.3702127659574468,\n \"acc_stderr\": 0.03156564682236784,\n \"acc_norm\": 0.3702127659574468,\n \"acc_norm_stderr\": 0.03156564682236784\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.2807017543859649,\n \"acc_stderr\": 0.042270544512321984,\n \"acc_norm\": 0.2807017543859649,\n \"acc_norm_stderr\": 0.042270544512321984\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.38620689655172413,\n \"acc_stderr\": 0.04057324734419036,\n \"acc_norm\": 0.38620689655172413,\n \"acc_norm_stderr\": 0.04057324734419036\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.29365079365079366,\n \"acc_stderr\": 0.023456037383982026,\n \"acc_norm\": 0.29365079365079366,\n \"acc_norm_stderr\": 0.023456037383982026\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.24603174603174602,\n \"acc_stderr\": 0.03852273364924315,\n \"acc_norm\": 0.24603174603174602,\n \"acc_norm_stderr\": 0.03852273364924315\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.28,\n \"acc_stderr\": 0.04512608598542127,\n \"acc_norm\": 0.28,\n \"acc_norm_stderr\": 0.04512608598542127\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.3903225806451613,\n \"acc_stderr\": 0.027751256636969576,\n \"acc_norm\": 0.3903225806451613,\n \"acc_norm_stderr\": 0.027751256636969576\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.32019704433497537,\n \"acc_stderr\": 0.032826493853041504,\n \"acc_norm\": 0.32019704433497537,\n \"acc_norm_stderr\": 0.032826493853041504\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.42,\n \"acc_stderr\": 0.049604496374885836,\n \"acc_norm\": 0.42,\n \"acc_norm_stderr\": 0.049604496374885836\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.2606060606060606,\n \"acc_stderr\": 0.03427743175816524,\n \"acc_norm\": 0.2606060606060606,\n \"acc_norm_stderr\": 0.03427743175816524\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.5555555555555556,\n \"acc_stderr\": 0.035402943770953675,\n \"acc_norm\": 0.5555555555555556,\n \"acc_norm_stderr\": 0.035402943770953675\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.6528497409326425,\n \"acc_stderr\": 0.03435696168361355,\n \"acc_norm\": 0.6528497409326425,\n \"acc_norm_stderr\": 0.03435696168361355\n },\n 
\"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.40512820512820513,\n \"acc_stderr\": 0.024890471769938145,\n \"acc_norm\": 0.40512820512820513,\n \"acc_norm_stderr\": 0.024890471769938145\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.2851851851851852,\n \"acc_stderr\": 0.027528599210340492,\n \"acc_norm\": 0.2851851851851852,\n \"acc_norm_stderr\": 0.027528599210340492\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.4369747899159664,\n \"acc_stderr\": 0.03221943636566197,\n \"acc_norm\": 0.4369747899159664,\n \"acc_norm_stderr\": 0.03221943636566197\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.2913907284768212,\n \"acc_stderr\": 0.03710185726119995,\n \"acc_norm\": 0.2913907284768212,\n \"acc_norm_stderr\": 0.03710185726119995\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.5926605504587156,\n \"acc_stderr\": 0.021065986244412895,\n \"acc_norm\": 0.5926605504587156,\n \"acc_norm_stderr\": 0.021065986244412895\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.42592592592592593,\n \"acc_stderr\": 0.03372343271653063,\n \"acc_norm\": 0.42592592592592593,\n \"acc_norm_stderr\": 0.03372343271653063\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.37745098039215685,\n \"acc_stderr\": 0.03402272044340705,\n \"acc_norm\": 0.37745098039215685,\n \"acc_norm_stderr\": 0.03402272044340705\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.5189873417721519,\n \"acc_stderr\": 0.03252375148090448,\n \"acc_norm\": 0.5189873417721519,\n \"acc_norm_stderr\": 0.03252375148090448\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.5112107623318386,\n \"acc_stderr\": 0.033549366530984746,\n \"acc_norm\": 0.5112107623318386,\n \"acc_norm_stderr\": 0.033549366530984746\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.45038167938931295,\n \"acc_stderr\": 0.04363643698524779,\n \"acc_norm\": 0.45038167938931295,\n \"acc_norm_stderr\": 0.04363643698524779\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.6033057851239669,\n \"acc_stderr\": 0.04465869780531009,\n \"acc_norm\": 0.6033057851239669,\n \"acc_norm_stderr\": 0.04465869780531009\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.5277777777777778,\n \"acc_stderr\": 0.048262172941398944,\n \"acc_norm\": 0.5277777777777778,\n \"acc_norm_stderr\": 0.048262172941398944\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.4539877300613497,\n \"acc_stderr\": 0.0391170190467718,\n \"acc_norm\": 0.4539877300613497,\n \"acc_norm_stderr\": 0.0391170190467718\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.41964285714285715,\n \"acc_stderr\": 0.04684099321077106,\n \"acc_norm\": 0.41964285714285715,\n \"acc_norm_stderr\": 0.04684099321077106\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.6116504854368932,\n \"acc_stderr\": 0.04825729337356389,\n \"acc_norm\": 0.6116504854368932,\n \"acc_norm_stderr\": 0.04825729337356389\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.6666666666666666,\n \"acc_stderr\": 0.030882736974138656,\n \"acc_norm\": 0.6666666666666666,\n \"acc_norm_stderr\": 0.030882736974138656\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.43,\n \"acc_stderr\": 0.04975698519562427,\n \"acc_norm\": 0.43,\n \"acc_norm_stderr\": 0.04975698519562427\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 
0.6015325670498084,\n \"acc_stderr\": 0.017507438602777408,\n \"acc_norm\": 0.6015325670498084,\n \"acc_norm_stderr\": 0.017507438602777408\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.407514450867052,\n \"acc_stderr\": 0.0264545781469315,\n \"acc_norm\": 0.407514450867052,\n \"acc_norm_stderr\": 0.0264545781469315\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.3229050279329609,\n \"acc_stderr\": 0.01563844038024149,\n \"acc_norm\": 0.3229050279329609,\n \"acc_norm_stderr\": 0.01563844038024149\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.46405228758169936,\n \"acc_stderr\": 0.028555827516528777,\n \"acc_norm\": 0.46405228758169936,\n \"acc_norm_stderr\": 0.028555827516528777\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.4694533762057878,\n \"acc_stderr\": 0.02834504586484068,\n \"acc_norm\": 0.4694533762057878,\n \"acc_norm_stderr\": 0.02834504586484068\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.48148148148148145,\n \"acc_stderr\": 0.027801656212323674,\n \"acc_norm\": 0.48148148148148145,\n \"acc_norm_stderr\": 0.027801656212323674\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.3049645390070922,\n \"acc_stderr\": 0.027464708442022128,\n \"acc_norm\": 0.3049645390070922,\n \"acc_norm_stderr\": 0.027464708442022128\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.2685788787483703,\n \"acc_stderr\": 0.011320056629121741,\n \"acc_norm\": 0.2685788787483703,\n \"acc_norm_stderr\": 0.011320056629121741\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.39705882352941174,\n \"acc_stderr\": 0.029722152099280072,\n \"acc_norm\": 0.39705882352941174,\n \"acc_norm_stderr\": 0.029722152099280072\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.40032679738562094,\n \"acc_stderr\": 0.01982184368827177,\n \"acc_norm\": 0.40032679738562094,\n \"acc_norm_stderr\": 0.01982184368827177\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.5727272727272728,\n \"acc_stderr\": 0.04738198703545483,\n \"acc_norm\": 0.5727272727272728,\n \"acc_norm_stderr\": 0.04738198703545483\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.47346938775510206,\n \"acc_stderr\": 0.03196412734523272,\n \"acc_norm\": 0.47346938775510206,\n \"acc_norm_stderr\": 0.03196412734523272\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.39303482587064675,\n \"acc_stderr\": 0.0345368246603156,\n \"acc_norm\": 0.39303482587064675,\n \"acc_norm_stderr\": 0.0345368246603156\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.61,\n \"acc_stderr\": 0.04902071300001974,\n \"acc_norm\": 0.61,\n \"acc_norm_stderr\": 0.04902071300001974\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.3433734939759036,\n \"acc_stderr\": 0.03696584317010601,\n \"acc_norm\": 0.3433734939759036,\n \"acc_norm_stderr\": 0.03696584317010601\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.5847953216374269,\n \"acc_stderr\": 0.037792759455032014,\n \"acc_norm\": 0.5847953216374269,\n \"acc_norm_stderr\": 0.037792759455032014\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.35862913096695226,\n \"mc1_stderr\": 0.016789289499502022,\n \"mc2\": 0.5108336746233818,\n \"mc2_stderr\": 0.015741003892075174\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.6708760852407262,\n \"acc_stderr\": 0.013206387089091458\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.06595905989385899,\n \"acc_stderr\": 
0.006836951192034193\n }\n}\n```", "repo_url": "https://huggingface.co/martyn/mistral-megamerge-dare-7b", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_12_16T16_59_07.341646", "path": ["**/details_harness|arc:challenge|25_2023-12-16T16-59-07.341646.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-12-16T16-59-07.341646.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_12_16T16_59_07.341646", "path": ["**/details_harness|gsm8k|5_2023-12-16T16-59-07.341646.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-12-16T16-59-07.341646.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_12_16T16_59_07.341646", "path": ["**/details_harness|hellaswag|10_2023-12-16T16-59-07.341646.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-12-16T16-59-07.341646.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_12_16T16_59_07.341646", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T16-59-07.341646.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-12-16T16-59-07.341646.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-12-16T16-59-07.341646.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T16-59-07.341646.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T16-59-07.341646.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-12-16T16-59-07.341646.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T16-59-07.341646.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T16-59-07.341646.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T16-59-07.341646.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T16-59-07.341646.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-12-16T16-59-07.341646.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-12-16T16-59-07.341646.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T16-59-07.341646.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-12-16T16-59-07.341646.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T16-59-07.341646.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T16-59-07.341646.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T16-59-07.341646.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-12-16T16-59-07.341646.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T16-59-07.341646.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T16-59-07.341646.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T16-59-07.341646.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T16-59-07.341646.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T16-59-07.341646.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T16-59-07.341646.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T16-59-07.341646.parquet", 
"**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T16-59-07.341646.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T16-59-07.341646.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T16-59-07.341646.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T16-59-07.341646.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T16-59-07.341646.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T16-59-07.341646.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T16-59-07.341646.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-12-16T16-59-07.341646.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T16-59-07.341646.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-12-16T16-59-07.341646.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T16-59-07.341646.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T16-59-07.341646.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T16-59-07.341646.parquet", "**/details_harness|hendrycksTest-management|5_2023-12-16T16-59-07.341646.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-12-16T16-59-07.341646.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T16-59-07.341646.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T16-59-07.341646.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T16-59-07.341646.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T16-59-07.341646.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-12-16T16-59-07.341646.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-12-16T16-59-07.341646.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-12-16T16-59-07.341646.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T16-59-07.341646.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-12-16T16-59-07.341646.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T16-59-07.341646.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T16-59-07.341646.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-12-16T16-59-07.341646.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-12-16T16-59-07.341646.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-12-16T16-59-07.341646.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T16-59-07.341646.parquet", "**/details_harness|hendrycksTest-virology|5_2023-12-16T16-59-07.341646.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-12-16T16-59-07.341646.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T16-59-07.341646.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-12-16T16-59-07.341646.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-12-16T16-59-07.341646.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T16-59-07.341646.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T16-59-07.341646.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-12-16T16-59-07.341646.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T16-59-07.341646.parquet", 
"**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T16-59-07.341646.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T16-59-07.341646.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T16-59-07.341646.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-12-16T16-59-07.341646.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-12-16T16-59-07.341646.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T16-59-07.341646.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-12-16T16-59-07.341646.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T16-59-07.341646.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T16-59-07.341646.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T16-59-07.341646.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-12-16T16-59-07.341646.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T16-59-07.341646.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T16-59-07.341646.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T16-59-07.341646.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T16-59-07.341646.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T16-59-07.341646.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T16-59-07.341646.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T16-59-07.341646.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T16-59-07.341646.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T16-59-07.341646.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T16-59-07.341646.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T16-59-07.341646.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T16-59-07.341646.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T16-59-07.341646.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T16-59-07.341646.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-12-16T16-59-07.341646.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T16-59-07.341646.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-12-16T16-59-07.341646.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T16-59-07.341646.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T16-59-07.341646.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T16-59-07.341646.parquet", "**/details_harness|hendrycksTest-management|5_2023-12-16T16-59-07.341646.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-12-16T16-59-07.341646.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T16-59-07.341646.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T16-59-07.341646.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T16-59-07.341646.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T16-59-07.341646.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-12-16T16-59-07.341646.parquet", 
"**/details_harness|hendrycksTest-philosophy|5_2023-12-16T16-59-07.341646.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-12-16T16-59-07.341646.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T16-59-07.341646.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-12-16T16-59-07.341646.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T16-59-07.341646.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T16-59-07.341646.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-12-16T16-59-07.341646.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-12-16T16-59-07.341646.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-12-16T16-59-07.341646.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T16-59-07.341646.parquet", "**/details_harness|hendrycksTest-virology|5_2023-12-16T16-59-07.341646.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-12-16T16-59-07.341646.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_12_16T16_59_07.341646", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T16-59-07.341646.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T16-59-07.341646.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_12_16T16_59_07.341646", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-12-16T16-59-07.341646.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-12-16T16-59-07.341646.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_12_16T16_59_07.341646", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-12-16T16-59-07.341646.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-12-16T16-59-07.341646.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_12_16T16_59_07.341646", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T16-59-07.341646.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T16-59-07.341646.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_12_16T16_59_07.341646", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T16-59-07.341646.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T16-59-07.341646.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_12_16T16_59_07.341646", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-12-16T16-59-07.341646.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-12-16T16-59-07.341646.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_12_16T16_59_07.341646", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T16-59-07.341646.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T16-59-07.341646.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_12_16T16_59_07.341646", "path": 
["**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T16-59-07.341646.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T16-59-07.341646.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_12_16T16_59_07.341646", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T16-59-07.341646.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T16-59-07.341646.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_12_16T16_59_07.341646", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T16-59-07.341646.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T16-59-07.341646.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_12_16T16_59_07.341646", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-12-16T16-59-07.341646.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-12-16T16-59-07.341646.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_12_16T16_59_07.341646", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-12-16T16-59-07.341646.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-12-16T16-59-07.341646.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_12_16T16_59_07.341646", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T16-59-07.341646.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T16-59-07.341646.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_12_16T16_59_07.341646", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-12-16T16-59-07.341646.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-12-16T16-59-07.341646.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_12_16T16_59_07.341646", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T16-59-07.341646.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T16-59-07.341646.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_12_16T16_59_07.341646", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T16-59-07.341646.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T16-59-07.341646.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_12_16T16_59_07.341646", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T16-59-07.341646.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T16-59-07.341646.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_12_16T16_59_07.341646", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-12-16T16-59-07.341646.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-global_facts|5_2023-12-16T16-59-07.341646.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_12_16T16_59_07.341646", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T16-59-07.341646.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T16-59-07.341646.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_12_16T16_59_07.341646", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T16-59-07.341646.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T16-59-07.341646.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_12_16T16_59_07.341646", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T16-59-07.341646.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T16-59-07.341646.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_12_16T16_59_07.341646", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T16-59-07.341646.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T16-59-07.341646.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_12_16T16_59_07.341646", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T16-59-07.341646.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T16-59-07.341646.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_12_16T16_59_07.341646", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T16-59-07.341646.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T16-59-07.341646.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_12_16T16_59_07.341646", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T16-59-07.341646.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T16-59-07.341646.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_12_16T16_59_07.341646", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T16-59-07.341646.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T16-59-07.341646.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_12_16T16_59_07.341646", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T16-59-07.341646.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T16-59-07.341646.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_12_16T16_59_07.341646", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T16-59-07.341646.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T16-59-07.341646.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_12_16T16_59_07.341646", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T16-59-07.341646.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T16-59-07.341646.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_12_16T16_59_07.341646", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T16-59-07.341646.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T16-59-07.341646.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_12_16T16_59_07.341646", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T16-59-07.341646.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T16-59-07.341646.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_12_16T16_59_07.341646", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T16-59-07.341646.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T16-59-07.341646.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_12_16T16_59_07.341646", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-12-16T16-59-07.341646.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-12-16T16-59-07.341646.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_12_16T16_59_07.341646", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T16-59-07.341646.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T16-59-07.341646.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_12_16T16_59_07.341646", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-12-16T16-59-07.341646.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-12-16T16-59-07.341646.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_12_16T16_59_07.341646", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T16-59-07.341646.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T16-59-07.341646.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_12_16T16_59_07.341646", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T16-59-07.341646.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T16-59-07.341646.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_12_16T16_59_07.341646", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T16-59-07.341646.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T16-59-07.341646.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_12_16T16_59_07.341646", "path": ["**/details_harness|hendrycksTest-management|5_2023-12-16T16-59-07.341646.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-12-16T16-59-07.341646.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_12_16T16_59_07.341646", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-12-16T16-59-07.341646.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-12-16T16-59-07.341646.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_12_16T16_59_07.341646", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T16-59-07.341646.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T16-59-07.341646.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_12_16T16_59_07.341646", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T16-59-07.341646.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T16-59-07.341646.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_12_16T16_59_07.341646", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T16-59-07.341646.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T16-59-07.341646.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_12_16T16_59_07.341646", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T16-59-07.341646.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T16-59-07.341646.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_12_16T16_59_07.341646", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-12-16T16-59-07.341646.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-12-16T16-59-07.341646.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_12_16T16_59_07.341646", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-12-16T16-59-07.341646.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-12-16T16-59-07.341646.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_12_16T16_59_07.341646", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-12-16T16-59-07.341646.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-12-16T16-59-07.341646.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_12_16T16_59_07.341646", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T16-59-07.341646.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T16-59-07.341646.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_12_16T16_59_07.341646", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2023-12-16T16-59-07.341646.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-12-16T16-59-07.341646.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_12_16T16_59_07.341646", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T16-59-07.341646.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T16-59-07.341646.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_12_16T16_59_07.341646", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T16-59-07.341646.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T16-59-07.341646.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_12_16T16_59_07.341646", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-12-16T16-59-07.341646.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-12-16T16-59-07.341646.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_12_16T16_59_07.341646", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-12-16T16-59-07.341646.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-12-16T16-59-07.341646.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_12_16T16_59_07.341646", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-12-16T16-59-07.341646.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-12-16T16-59-07.341646.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_12_16T16_59_07.341646", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T16-59-07.341646.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T16-59-07.341646.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_12_16T16_59_07.341646", "path": ["**/details_harness|hendrycksTest-virology|5_2023-12-16T16-59-07.341646.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-12-16T16-59-07.341646.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_12_16T16_59_07.341646", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-12-16T16-59-07.341646.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-12-16T16-59-07.341646.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_12_16T16_59_07.341646", "path": ["**/details_harness|truthfulqa:mc|0_2023-12-16T16-59-07.341646.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-12-16T16-59-07.341646.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_12_16T16_59_07.341646", "path": ["**/details_harness|winogrande|5_2023-12-16T16-59-07.341646.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-12-16T16-59-07.341646.parquet"]}]}, {"config_name": "results", "data_files": [{"split": 
"2023_12_16T16_59_07.341646", "path": ["results_2023-12-16T16-59-07.341646.parquet"]}, {"split": "latest", "path": ["results_2023-12-16T16-59-07.341646.parquet"]}]}]}
2023-12-16T17:02:40+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of martyn/mistral-megamerge-dare-7b Dataset automatically created during the evaluation run of model martyn/mistral-megamerge-dare-7b on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-12-16T16:59:07.341646 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
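The flattened card above mentions a loading snippet that was dropped during preprocessing. As a hedged sketch only: the repo id below is an assumption inferred from the leaderboard's usual `open-llm-leaderboard/details_<org>__<model>` naming pattern (it is not stated verbatim in this record), and the config name mirrors the template used by these evaluation-run datasets.

```python
from datasets import load_dataset

# Assumed repo id, inferred from the "open-llm-leaderboard/details_<org>__<model>"
# naming pattern; it is not given verbatim in this record.
data = load_dataset(
    "open-llm-leaderboard/details_martyn__mistral-megamerge-dare-7b",
    "harness_winogrande_5",
    split="train",
)
```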
[ "# Dataset Card for Evaluation run of martyn/mistral-megamerge-dare-7b\n\n\n\nDataset automatically created during the evaluation run of model martyn/mistral-megamerge-dare-7b on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-12-16T16:59:07.341646(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of martyn/mistral-megamerge-dare-7b\n\n\n\nDataset automatically created during the evaluation run of model martyn/mistral-megamerge-dare-7b on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-12-16T16:59:07.341646(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 187, 67, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of martyn/mistral-megamerge-dare-7b\n\n\n\nDataset automatically created during the evaluation run of model martyn/mistral-megamerge-dare-7b on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-12-16T16:59:07.341646(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]## Dataset Card Authors [optional]## Dataset Card Contact" ]
c4693efbe808754fb99f3d182c20a6c4f87b3cb6
# Dataset Card for Evaluation run of mncai/mistral-7b-dpo-v5 <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [mncai/mistral-7b-dpo-v5](https://huggingface.co/mncai/mistral-7b-dpo-v5) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_mncai__mistral-7b-dpo-v5", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-12-16T17:06:29.601004](https://huggingface.co/datasets/open-llm-leaderboard/details_mncai__mistral-7b-dpo-v5/blob/main/results_2023-12-16T17-06-29.601004.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.6444771644137858, "acc_stderr": 0.032189712118428256, "acc_norm": 0.6439145631454195, "acc_norm_stderr": 0.03285791543982518, "mc1": 0.5385556915544676, "mc1_stderr": 0.017451384104637452, "mc2": 0.6686090881936299, "mc2_stderr": 0.015322918299770005 }, "harness|arc:challenge|25": { "acc": 0.6911262798634812, "acc_stderr": 0.013501770929344, "acc_norm": 0.7201365187713311, "acc_norm_stderr": 0.01311904089772592 }, "harness|hellaswag|10": { "acc": 0.6978689504082852, "acc_stderr": 0.004582433109636476, "acc_norm": 0.8757219677355108, "acc_norm_stderr": 0.003292242543637345 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.34, "acc_stderr": 0.04760952285695235, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695235 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.6370370370370371, "acc_stderr": 0.04153948404742398, "acc_norm": 0.6370370370370371, "acc_norm_stderr": 0.04153948404742398 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.6907894736842105, "acc_stderr": 0.037610708698674805, "acc_norm": 0.6907894736842105, "acc_norm_stderr": 0.037610708698674805 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.61, "acc_stderr": 0.04902071300001975, "acc_norm": 0.61, "acc_norm_stderr": 0.04902071300001975 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.6943396226415094, "acc_stderr": 0.028353298073322666, "acc_norm": 0.6943396226415094, "acc_norm_stderr": 0.028353298073322666 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.7361111111111112, "acc_stderr": 0.03685651095897532, "acc_norm": 0.7361111111111112, "acc_norm_stderr": 0.03685651095897532 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.44, "acc_stderr": 0.04988876515698589, "acc_norm": 0.44, "acc_norm_stderr": 0.04988876515698589 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.55, "acc_stderr": 0.05, "acc_norm": 0.55, "acc_norm_stderr": 0.05 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.29, "acc_stderr": 
0.04560480215720684, "acc_norm": 0.29, "acc_norm_stderr": 0.04560480215720684 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.6705202312138728, "acc_stderr": 0.03583901754736412, "acc_norm": 0.6705202312138728, "acc_norm_stderr": 0.03583901754736412 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.4117647058823529, "acc_stderr": 0.048971049527263666, "acc_norm": 0.4117647058823529, "acc_norm_stderr": 0.048971049527263666 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.78, "acc_stderr": 0.04163331998932263, "acc_norm": 0.78, "acc_norm_stderr": 0.04163331998932263 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.5531914893617021, "acc_stderr": 0.0325005368436584, "acc_norm": 0.5531914893617021, "acc_norm_stderr": 0.0325005368436584 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.47368421052631576, "acc_stderr": 0.046970851366478626, "acc_norm": 0.47368421052631576, "acc_norm_stderr": 0.046970851366478626 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5724137931034483, "acc_stderr": 0.04122737111370332, "acc_norm": 0.5724137931034483, "acc_norm_stderr": 0.04122737111370332 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.42063492063492064, "acc_stderr": 0.025424835086923996, "acc_norm": 0.42063492063492064, "acc_norm_stderr": 0.025424835086923996 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.4365079365079365, "acc_stderr": 0.04435932892851466, "acc_norm": 0.4365079365079365, "acc_norm_stderr": 0.04435932892851466 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.33, "acc_stderr": 0.04725815626252604, "acc_norm": 0.33, "acc_norm_stderr": 0.04725815626252604 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.7935483870967742, "acc_stderr": 0.023025899617188716, "acc_norm": 0.7935483870967742, "acc_norm_stderr": 0.023025899617188716 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.49261083743842365, "acc_stderr": 0.035176035403610084, "acc_norm": 0.49261083743842365, "acc_norm_stderr": 0.035176035403610084 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.7, "acc_stderr": 0.046056618647183814, "acc_norm": 0.7, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.7696969696969697, "acc_stderr": 0.0328766675860349, "acc_norm": 0.7696969696969697, "acc_norm_stderr": 0.0328766675860349 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.7929292929292929, "acc_stderr": 0.028869778460267042, "acc_norm": 0.7929292929292929, "acc_norm_stderr": 0.028869778460267042 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.8808290155440415, "acc_stderr": 0.02338193534812143, "acc_norm": 0.8808290155440415, "acc_norm_stderr": 0.02338193534812143 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.6487179487179487, "acc_stderr": 0.024203665177902803, "acc_norm": 0.6487179487179487, "acc_norm_stderr": 0.024203665177902803 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.3037037037037037, "acc_stderr": 0.028037929969114993, "acc_norm": 0.3037037037037037, "acc_norm_stderr": 0.028037929969114993 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.634453781512605, "acc_stderr": 0.031282177063684614, "acc_norm": 0.634453781512605, "acc_norm_stderr": 0.031282177063684614 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.31125827814569534, "acc_stderr": 0.03780445850526732, "acc_norm": 0.31125827814569534, "acc_norm_stderr": 
0.03780445850526732 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8403669724770643, "acc_stderr": 0.015703498348461766, "acc_norm": 0.8403669724770643, "acc_norm_stderr": 0.015703498348461766 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.49537037037037035, "acc_stderr": 0.03409825519163572, "acc_norm": 0.49537037037037035, "acc_norm_stderr": 0.03409825519163572 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.8088235294117647, "acc_stderr": 0.027599174300640766, "acc_norm": 0.8088235294117647, "acc_norm_stderr": 0.027599174300640766 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.7763713080168776, "acc_stderr": 0.027123298205229962, "acc_norm": 0.7763713080168776, "acc_norm_stderr": 0.027123298205229962 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.695067264573991, "acc_stderr": 0.030898610882477515, "acc_norm": 0.695067264573991, "acc_norm_stderr": 0.030898610882477515 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.7786259541984732, "acc_stderr": 0.03641297081313729, "acc_norm": 0.7786259541984732, "acc_norm_stderr": 0.03641297081313729 }, "harness|hendrycksTest-international_law|5": { "acc": 0.768595041322314, "acc_stderr": 0.038498560987940904, "acc_norm": 0.768595041322314, "acc_norm_stderr": 0.038498560987940904 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.7592592592592593, "acc_stderr": 0.04133119440243839, "acc_norm": 0.7592592592592593, "acc_norm_stderr": 0.04133119440243839 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.7484662576687117, "acc_stderr": 0.03408997886857529, "acc_norm": 0.7484662576687117, "acc_norm_stderr": 0.03408997886857529 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.4642857142857143, "acc_stderr": 0.04733667890053756, "acc_norm": 0.4642857142857143, "acc_norm_stderr": 0.04733667890053756 }, "harness|hendrycksTest-management|5": { "acc": 0.7864077669902912, "acc_stderr": 0.040580420156460344, "acc_norm": 0.7864077669902912, "acc_norm_stderr": 0.040580420156460344 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8846153846153846, "acc_stderr": 0.020930193185179326, "acc_norm": 0.8846153846153846, "acc_norm_stderr": 0.020930193185179326 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.68, "acc_stderr": 0.046882617226215034, "acc_norm": 0.68, "acc_norm_stderr": 0.046882617226215034 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.8301404853128991, "acc_stderr": 0.013428186370608311, "acc_norm": 0.8301404853128991, "acc_norm_stderr": 0.013428186370608311 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.7398843930635838, "acc_stderr": 0.023618678310069356, "acc_norm": 0.7398843930635838, "acc_norm_stderr": 0.023618678310069356 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.4480446927374302, "acc_stderr": 0.016631976628930595, "acc_norm": 0.4480446927374302, "acc_norm_stderr": 0.016631976628930595 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.7189542483660131, "acc_stderr": 0.025738854797818737, "acc_norm": 0.7189542483660131, "acc_norm_stderr": 0.025738854797818737 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.7138263665594855, "acc_stderr": 0.025670259242188933, "acc_norm": 0.7138263665594855, "acc_norm_stderr": 0.025670259242188933 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.75, "acc_stderr": 0.02409347123262133, "acc_norm": 0.75, "acc_norm_stderr": 0.02409347123262133 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.4858156028368794, "acc_stderr": 0.02981549448368206, 
"acc_norm": 0.4858156028368794, "acc_norm_stderr": 0.02981549448368206 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.46479791395045633, "acc_stderr": 0.012738547371303957, "acc_norm": 0.46479791395045633, "acc_norm_stderr": 0.012738547371303957 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.6544117647058824, "acc_stderr": 0.02888819310398863, "acc_norm": 0.6544117647058824, "acc_norm_stderr": 0.02888819310398863 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.6617647058823529, "acc_stderr": 0.01913994374848704, "acc_norm": 0.6617647058823529, "acc_norm_stderr": 0.01913994374848704 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6545454545454545, "acc_stderr": 0.04554619617541054, "acc_norm": 0.6545454545454545, "acc_norm_stderr": 0.04554619617541054 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.7183673469387755, "acc_stderr": 0.028795185574291293, "acc_norm": 0.7183673469387755, "acc_norm_stderr": 0.028795185574291293 }, "harness|hendrycksTest-sociology|5": { "acc": 0.8407960199004975, "acc_stderr": 0.025870646766169146, "acc_norm": 0.8407960199004975, "acc_norm_stderr": 0.025870646766169146 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.89, "acc_stderr": 0.03144660377352202, "acc_norm": 0.89, "acc_norm_stderr": 0.03144660377352202 }, "harness|hendrycksTest-virology|5": { "acc": 0.5481927710843374, "acc_stderr": 0.03874371556587953, "acc_norm": 0.5481927710843374, "acc_norm_stderr": 0.03874371556587953 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8245614035087719, "acc_stderr": 0.029170885500727665, "acc_norm": 0.8245614035087719, "acc_norm_stderr": 0.029170885500727665 }, "harness|truthfulqa:mc|0": { "mc1": 0.5385556915544676, "mc1_stderr": 0.017451384104637452, "mc2": 0.6686090881936299, "mc2_stderr": 0.015322918299770005 }, "harness|winogrande|5": { "acc": 0.8224151539068666, "acc_stderr": 0.010740676861359237 }, "harness|gsm8k|5": { "acc": 0.7065959059893859, "acc_stderr": 0.012541830815461487 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). 
--> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
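As a small usage sketch complementing the snippet earlier in this card (assuming the "results" configuration and "latest" split exist exactly as described above), the aggregated metrics of the most recent run can be loaded directly:

```python
from datasets import load_dataset

# Load the aggregated metrics of the most recent run, using the "results"
# config and "latest" split described in the card above.
results = load_dataset(
    "open-llm-leaderboard/details_mncai__mistral-7b-dpo-v5",
    "results",
    split="latest",
)
print(results[0])  # one row holding the aggregated scores for this run
```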
open-llm-leaderboard/details_mncai__mistral-7b-dpo-v5
[ "region:us" ]
2023-12-16T17:09:18+00:00
{"pretty_name": "Evaluation run of mncai/mistral-7b-dpo-v5", "dataset_summary": "Dataset automatically created during the evaluation run of model [mncai/mistral-7b-dpo-v5](https://huggingface.co/mncai/mistral-7b-dpo-v5) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_mncai__mistral-7b-dpo-v5\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-12-16T17:06:29.601004](https://huggingface.co/datasets/open-llm-leaderboard/details_mncai__mistral-7b-dpo-v5/blob/main/results_2023-12-16T17-06-29.601004.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6444771644137858,\n \"acc_stderr\": 0.032189712118428256,\n \"acc_norm\": 0.6439145631454195,\n \"acc_norm_stderr\": 0.03285791543982518,\n \"mc1\": 0.5385556915544676,\n \"mc1_stderr\": 0.017451384104637452,\n \"mc2\": 0.6686090881936299,\n \"mc2_stderr\": 0.015322918299770005\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.6911262798634812,\n \"acc_stderr\": 0.013501770929344,\n \"acc_norm\": 0.7201365187713311,\n \"acc_norm_stderr\": 0.01311904089772592\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6978689504082852,\n \"acc_stderr\": 0.004582433109636476,\n \"acc_norm\": 0.8757219677355108,\n \"acc_norm_stderr\": 0.003292242543637345\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.34,\n \"acc_stderr\": 0.04760952285695235,\n \"acc_norm\": 0.34,\n \"acc_norm_stderr\": 0.04760952285695235\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6370370370370371,\n \"acc_stderr\": 0.04153948404742398,\n \"acc_norm\": 0.6370370370370371,\n \"acc_norm_stderr\": 0.04153948404742398\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.6907894736842105,\n \"acc_stderr\": 0.037610708698674805,\n \"acc_norm\": 0.6907894736842105,\n \"acc_norm_stderr\": 0.037610708698674805\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.61,\n \"acc_stderr\": 0.04902071300001975,\n \"acc_norm\": 0.61,\n \"acc_norm_stderr\": 0.04902071300001975\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.6943396226415094,\n \"acc_stderr\": 0.028353298073322666,\n \"acc_norm\": 0.6943396226415094,\n \"acc_norm_stderr\": 0.028353298073322666\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7361111111111112,\n \"acc_stderr\": 0.03685651095897532,\n \"acc_norm\": 0.7361111111111112,\n \"acc_norm_stderr\": 0.03685651095897532\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.44,\n \"acc_stderr\": 0.04988876515698589,\n \"acc_norm\": 0.44,\n 
\"acc_norm_stderr\": 0.04988876515698589\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.55,\n \"acc_stderr\": 0.05,\n \"acc_norm\": 0.55,\n \"acc_norm_stderr\": 0.05\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.29,\n \"acc_stderr\": 0.04560480215720684,\n \"acc_norm\": 0.29,\n \"acc_norm_stderr\": 0.04560480215720684\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6705202312138728,\n \"acc_stderr\": 0.03583901754736412,\n \"acc_norm\": 0.6705202312138728,\n \"acc_norm_stderr\": 0.03583901754736412\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.4117647058823529,\n \"acc_stderr\": 0.048971049527263666,\n \"acc_norm\": 0.4117647058823529,\n \"acc_norm_stderr\": 0.048971049527263666\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.78,\n \"acc_stderr\": 0.04163331998932263,\n \"acc_norm\": 0.78,\n \"acc_norm_stderr\": 0.04163331998932263\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.5531914893617021,\n \"acc_stderr\": 0.0325005368436584,\n \"acc_norm\": 0.5531914893617021,\n \"acc_norm_stderr\": 0.0325005368436584\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.47368421052631576,\n \"acc_stderr\": 0.046970851366478626,\n \"acc_norm\": 0.47368421052631576,\n \"acc_norm_stderr\": 0.046970851366478626\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5724137931034483,\n \"acc_stderr\": 0.04122737111370332,\n \"acc_norm\": 0.5724137931034483,\n \"acc_norm_stderr\": 0.04122737111370332\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.42063492063492064,\n \"acc_stderr\": 0.025424835086923996,\n \"acc_norm\": 0.42063492063492064,\n \"acc_norm_stderr\": 0.025424835086923996\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.4365079365079365,\n \"acc_stderr\": 0.04435932892851466,\n \"acc_norm\": 0.4365079365079365,\n \"acc_norm_stderr\": 0.04435932892851466\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.33,\n \"acc_stderr\": 0.04725815626252604,\n \"acc_norm\": 0.33,\n \"acc_norm_stderr\": 0.04725815626252604\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7935483870967742,\n \"acc_stderr\": 0.023025899617188716,\n \"acc_norm\": 0.7935483870967742,\n \"acc_norm_stderr\": 0.023025899617188716\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.49261083743842365,\n \"acc_stderr\": 0.035176035403610084,\n \"acc_norm\": 0.49261083743842365,\n \"acc_norm_stderr\": 0.035176035403610084\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.7,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.7,\n \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7696969696969697,\n \"acc_stderr\": 0.0328766675860349,\n \"acc_norm\": 0.7696969696969697,\n \"acc_norm_stderr\": 0.0328766675860349\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.7929292929292929,\n \"acc_stderr\": 0.028869778460267042,\n \"acc_norm\": 0.7929292929292929,\n \"acc_norm_stderr\": 0.028869778460267042\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.8808290155440415,\n \"acc_stderr\": 0.02338193534812143,\n \"acc_norm\": 0.8808290155440415,\n \"acc_norm_stderr\": 0.02338193534812143\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.6487179487179487,\n \"acc_stderr\": 
0.024203665177902803,\n \"acc_norm\": 0.6487179487179487,\n \"acc_norm_stderr\": 0.024203665177902803\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.3037037037037037,\n \"acc_stderr\": 0.028037929969114993,\n \"acc_norm\": 0.3037037037037037,\n \"acc_norm_stderr\": 0.028037929969114993\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.634453781512605,\n \"acc_stderr\": 0.031282177063684614,\n \"acc_norm\": 0.634453781512605,\n \"acc_norm_stderr\": 0.031282177063684614\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.31125827814569534,\n \"acc_stderr\": 0.03780445850526732,\n \"acc_norm\": 0.31125827814569534,\n \"acc_norm_stderr\": 0.03780445850526732\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8403669724770643,\n \"acc_stderr\": 0.015703498348461766,\n \"acc_norm\": 0.8403669724770643,\n \"acc_norm_stderr\": 0.015703498348461766\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.49537037037037035,\n \"acc_stderr\": 0.03409825519163572,\n \"acc_norm\": 0.49537037037037035,\n \"acc_norm_stderr\": 0.03409825519163572\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.8088235294117647,\n \"acc_stderr\": 0.027599174300640766,\n \"acc_norm\": 0.8088235294117647,\n \"acc_norm_stderr\": 0.027599174300640766\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.7763713080168776,\n \"acc_stderr\": 0.027123298205229962,\n \"acc_norm\": 0.7763713080168776,\n \"acc_norm_stderr\": 0.027123298205229962\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.695067264573991,\n \"acc_stderr\": 0.030898610882477515,\n \"acc_norm\": 0.695067264573991,\n \"acc_norm_stderr\": 0.030898610882477515\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.7786259541984732,\n \"acc_stderr\": 0.03641297081313729,\n \"acc_norm\": 0.7786259541984732,\n \"acc_norm_stderr\": 0.03641297081313729\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.768595041322314,\n \"acc_stderr\": 0.038498560987940904,\n \"acc_norm\": 0.768595041322314,\n \"acc_norm_stderr\": 0.038498560987940904\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7592592592592593,\n \"acc_stderr\": 0.04133119440243839,\n \"acc_norm\": 0.7592592592592593,\n \"acc_norm_stderr\": 0.04133119440243839\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7484662576687117,\n \"acc_stderr\": 0.03408997886857529,\n \"acc_norm\": 0.7484662576687117,\n \"acc_norm_stderr\": 0.03408997886857529\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.4642857142857143,\n \"acc_stderr\": 0.04733667890053756,\n \"acc_norm\": 0.4642857142857143,\n \"acc_norm_stderr\": 0.04733667890053756\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7864077669902912,\n \"acc_stderr\": 0.040580420156460344,\n \"acc_norm\": 0.7864077669902912,\n \"acc_norm_stderr\": 0.040580420156460344\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8846153846153846,\n \"acc_stderr\": 0.020930193185179326,\n \"acc_norm\": 0.8846153846153846,\n \"acc_norm_stderr\": 0.020930193185179326\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.68,\n \"acc_stderr\": 0.046882617226215034,\n \"acc_norm\": 0.68,\n \"acc_norm_stderr\": 0.046882617226215034\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.8301404853128991,\n \"acc_stderr\": 0.013428186370608311,\n \"acc_norm\": 0.8301404853128991,\n 
\"acc_norm_stderr\": 0.013428186370608311\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.7398843930635838,\n \"acc_stderr\": 0.023618678310069356,\n \"acc_norm\": 0.7398843930635838,\n \"acc_norm_stderr\": 0.023618678310069356\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.4480446927374302,\n \"acc_stderr\": 0.016631976628930595,\n \"acc_norm\": 0.4480446927374302,\n \"acc_norm_stderr\": 0.016631976628930595\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.7189542483660131,\n \"acc_stderr\": 0.025738854797818737,\n \"acc_norm\": 0.7189542483660131,\n \"acc_norm_stderr\": 0.025738854797818737\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.7138263665594855,\n \"acc_stderr\": 0.025670259242188933,\n \"acc_norm\": 0.7138263665594855,\n \"acc_norm_stderr\": 0.025670259242188933\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.75,\n \"acc_stderr\": 0.02409347123262133,\n \"acc_norm\": 0.75,\n \"acc_norm_stderr\": 0.02409347123262133\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.4858156028368794,\n \"acc_stderr\": 0.02981549448368206,\n \"acc_norm\": 0.4858156028368794,\n \"acc_norm_stderr\": 0.02981549448368206\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.46479791395045633,\n \"acc_stderr\": 0.012738547371303957,\n \"acc_norm\": 0.46479791395045633,\n \"acc_norm_stderr\": 0.012738547371303957\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.6544117647058824,\n \"acc_stderr\": 0.02888819310398863,\n \"acc_norm\": 0.6544117647058824,\n \"acc_norm_stderr\": 0.02888819310398863\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6617647058823529,\n \"acc_stderr\": 0.01913994374848704,\n \"acc_norm\": 0.6617647058823529,\n \"acc_norm_stderr\": 0.01913994374848704\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6545454545454545,\n \"acc_stderr\": 0.04554619617541054,\n \"acc_norm\": 0.6545454545454545,\n \"acc_norm_stderr\": 0.04554619617541054\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.7183673469387755,\n \"acc_stderr\": 0.028795185574291293,\n \"acc_norm\": 0.7183673469387755,\n \"acc_norm_stderr\": 0.028795185574291293\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8407960199004975,\n \"acc_stderr\": 0.025870646766169146,\n \"acc_norm\": 0.8407960199004975,\n \"acc_norm_stderr\": 0.025870646766169146\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.89,\n \"acc_stderr\": 0.03144660377352202,\n \"acc_norm\": 0.89,\n \"acc_norm_stderr\": 0.03144660377352202\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5481927710843374,\n \"acc_stderr\": 0.03874371556587953,\n \"acc_norm\": 0.5481927710843374,\n \"acc_norm_stderr\": 0.03874371556587953\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8245614035087719,\n \"acc_stderr\": 0.029170885500727665,\n \"acc_norm\": 0.8245614035087719,\n \"acc_norm_stderr\": 0.029170885500727665\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.5385556915544676,\n \"mc1_stderr\": 0.017451384104637452,\n \"mc2\": 0.6686090881936299,\n \"mc2_stderr\": 0.015322918299770005\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.8224151539068666,\n \"acc_stderr\": 0.010740676861359237\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.7065959059893859,\n \"acc_stderr\": 0.012541830815461487\n }\n}\n```", "repo_url": "https://huggingface.co/mncai/mistral-7b-dpo-v5", "leaderboard_url": 
"https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_12_16T17_06_29.601004", "path": ["**/details_harness|arc:challenge|25_2023-12-16T17-06-29.601004.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-12-16T17-06-29.601004.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_12_16T17_06_29.601004", "path": ["**/details_harness|gsm8k|5_2023-12-16T17-06-29.601004.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-12-16T17-06-29.601004.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_12_16T17_06_29.601004", "path": ["**/details_harness|hellaswag|10_2023-12-16T17-06-29.601004.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-12-16T17-06-29.601004.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_12_16T17_06_29.601004", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T17-06-29.601004.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-12-16T17-06-29.601004.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-12-16T17-06-29.601004.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T17-06-29.601004.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T17-06-29.601004.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-12-16T17-06-29.601004.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T17-06-29.601004.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T17-06-29.601004.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T17-06-29.601004.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T17-06-29.601004.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-12-16T17-06-29.601004.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-12-16T17-06-29.601004.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T17-06-29.601004.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-12-16T17-06-29.601004.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T17-06-29.601004.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T17-06-29.601004.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T17-06-29.601004.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-12-16T17-06-29.601004.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T17-06-29.601004.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T17-06-29.601004.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T17-06-29.601004.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T17-06-29.601004.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T17-06-29.601004.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T17-06-29.601004.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T17-06-29.601004.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T17-06-29.601004.parquet", 
"**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T17-06-29.601004.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T17-06-29.601004.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T17-06-29.601004.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T17-06-29.601004.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T17-06-29.601004.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T17-06-29.601004.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-12-16T17-06-29.601004.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T17-06-29.601004.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-12-16T17-06-29.601004.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T17-06-29.601004.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T17-06-29.601004.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T17-06-29.601004.parquet", "**/details_harness|hendrycksTest-management|5_2023-12-16T17-06-29.601004.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-12-16T17-06-29.601004.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T17-06-29.601004.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T17-06-29.601004.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T17-06-29.601004.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T17-06-29.601004.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-12-16T17-06-29.601004.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-12-16T17-06-29.601004.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-12-16T17-06-29.601004.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T17-06-29.601004.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-12-16T17-06-29.601004.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T17-06-29.601004.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T17-06-29.601004.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-12-16T17-06-29.601004.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-12-16T17-06-29.601004.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-12-16T17-06-29.601004.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T17-06-29.601004.parquet", "**/details_harness|hendrycksTest-virology|5_2023-12-16T17-06-29.601004.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-12-16T17-06-29.601004.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T17-06-29.601004.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-12-16T17-06-29.601004.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-12-16T17-06-29.601004.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T17-06-29.601004.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T17-06-29.601004.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-12-16T17-06-29.601004.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T17-06-29.601004.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T17-06-29.601004.parquet", 
"**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T17-06-29.601004.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T17-06-29.601004.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-12-16T17-06-29.601004.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-12-16T17-06-29.601004.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T17-06-29.601004.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-12-16T17-06-29.601004.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T17-06-29.601004.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T17-06-29.601004.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T17-06-29.601004.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-12-16T17-06-29.601004.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T17-06-29.601004.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T17-06-29.601004.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T17-06-29.601004.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T17-06-29.601004.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T17-06-29.601004.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T17-06-29.601004.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T17-06-29.601004.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T17-06-29.601004.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T17-06-29.601004.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T17-06-29.601004.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T17-06-29.601004.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T17-06-29.601004.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T17-06-29.601004.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T17-06-29.601004.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-12-16T17-06-29.601004.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T17-06-29.601004.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-12-16T17-06-29.601004.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T17-06-29.601004.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T17-06-29.601004.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T17-06-29.601004.parquet", "**/details_harness|hendrycksTest-management|5_2023-12-16T17-06-29.601004.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-12-16T17-06-29.601004.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T17-06-29.601004.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T17-06-29.601004.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T17-06-29.601004.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T17-06-29.601004.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-12-16T17-06-29.601004.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-12-16T17-06-29.601004.parquet", 
"**/details_harness|hendrycksTest-prehistory|5_2023-12-16T17-06-29.601004.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T17-06-29.601004.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-12-16T17-06-29.601004.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T17-06-29.601004.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T17-06-29.601004.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-12-16T17-06-29.601004.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-12-16T17-06-29.601004.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-12-16T17-06-29.601004.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T17-06-29.601004.parquet", "**/details_harness|hendrycksTest-virology|5_2023-12-16T17-06-29.601004.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-12-16T17-06-29.601004.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_12_16T17_06_29.601004", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T17-06-29.601004.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T17-06-29.601004.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_12_16T17_06_29.601004", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-12-16T17-06-29.601004.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-12-16T17-06-29.601004.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_12_16T17_06_29.601004", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-12-16T17-06-29.601004.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-12-16T17-06-29.601004.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_12_16T17_06_29.601004", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T17-06-29.601004.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T17-06-29.601004.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_12_16T17_06_29.601004", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T17-06-29.601004.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T17-06-29.601004.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_12_16T17_06_29.601004", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-12-16T17-06-29.601004.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-12-16T17-06-29.601004.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_12_16T17_06_29.601004", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T17-06-29.601004.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T17-06-29.601004.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_12_16T17_06_29.601004", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T17-06-29.601004.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T17-06-29.601004.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_12_16T17_06_29.601004", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T17-06-29.601004.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T17-06-29.601004.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_12_16T17_06_29.601004", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T17-06-29.601004.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T17-06-29.601004.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_12_16T17_06_29.601004", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-12-16T17-06-29.601004.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-12-16T17-06-29.601004.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_12_16T17_06_29.601004", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-12-16T17-06-29.601004.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-12-16T17-06-29.601004.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_12_16T17_06_29.601004", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T17-06-29.601004.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T17-06-29.601004.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_12_16T17_06_29.601004", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-12-16T17-06-29.601004.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-12-16T17-06-29.601004.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_12_16T17_06_29.601004", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T17-06-29.601004.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T17-06-29.601004.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_12_16T17_06_29.601004", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T17-06-29.601004.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T17-06-29.601004.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_12_16T17_06_29.601004", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T17-06-29.601004.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T17-06-29.601004.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_12_16T17_06_29.601004", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-12-16T17-06-29.601004.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-12-16T17-06-29.601004.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_12_16T17_06_29.601004", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T17-06-29.601004.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T17-06-29.601004.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_12_16T17_06_29.601004", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T17-06-29.601004.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T17-06-29.601004.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_12_16T17_06_29.601004", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T17-06-29.601004.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T17-06-29.601004.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_12_16T17_06_29.601004", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T17-06-29.601004.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T17-06-29.601004.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_12_16T17_06_29.601004", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T17-06-29.601004.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T17-06-29.601004.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_12_16T17_06_29.601004", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T17-06-29.601004.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T17-06-29.601004.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_12_16T17_06_29.601004", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T17-06-29.601004.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T17-06-29.601004.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_12_16T17_06_29.601004", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T17-06-29.601004.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T17-06-29.601004.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_12_16T17_06_29.601004", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T17-06-29.601004.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T17-06-29.601004.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_12_16T17_06_29.601004", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T17-06-29.601004.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T17-06-29.601004.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_12_16T17_06_29.601004", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T17-06-29.601004.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T17-06-29.601004.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_12_16T17_06_29.601004", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T17-06-29.601004.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T17-06-29.601004.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_12_16T17_06_29.601004", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T17-06-29.601004.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T17-06-29.601004.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_12_16T17_06_29.601004", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T17-06-29.601004.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T17-06-29.601004.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_12_16T17_06_29.601004", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-12-16T17-06-29.601004.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-12-16T17-06-29.601004.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_12_16T17_06_29.601004", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T17-06-29.601004.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T17-06-29.601004.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_12_16T17_06_29.601004", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-12-16T17-06-29.601004.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-12-16T17-06-29.601004.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_12_16T17_06_29.601004", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T17-06-29.601004.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T17-06-29.601004.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_12_16T17_06_29.601004", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T17-06-29.601004.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T17-06-29.601004.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_12_16T17_06_29.601004", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T17-06-29.601004.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T17-06-29.601004.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_12_16T17_06_29.601004", "path": ["**/details_harness|hendrycksTest-management|5_2023-12-16T17-06-29.601004.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-12-16T17-06-29.601004.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_12_16T17_06_29.601004", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-12-16T17-06-29.601004.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-12-16T17-06-29.601004.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_12_16T17_06_29.601004", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T17-06-29.601004.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T17-06-29.601004.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_12_16T17_06_29.601004", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T17-06-29.601004.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T17-06-29.601004.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_12_16T17_06_29.601004", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T17-06-29.601004.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T17-06-29.601004.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_12_16T17_06_29.601004", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T17-06-29.601004.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T17-06-29.601004.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_12_16T17_06_29.601004", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-12-16T17-06-29.601004.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-12-16T17-06-29.601004.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_12_16T17_06_29.601004", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-12-16T17-06-29.601004.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-12-16T17-06-29.601004.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_12_16T17_06_29.601004", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-12-16T17-06-29.601004.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-12-16T17-06-29.601004.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_12_16T17_06_29.601004", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T17-06-29.601004.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T17-06-29.601004.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_12_16T17_06_29.601004", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-12-16T17-06-29.601004.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2023-12-16T17-06-29.601004.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_12_16T17_06_29.601004", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T17-06-29.601004.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T17-06-29.601004.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_12_16T17_06_29.601004", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T17-06-29.601004.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T17-06-29.601004.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_12_16T17_06_29.601004", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-12-16T17-06-29.601004.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-12-16T17-06-29.601004.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_12_16T17_06_29.601004", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-12-16T17-06-29.601004.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-12-16T17-06-29.601004.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_12_16T17_06_29.601004", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-12-16T17-06-29.601004.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-12-16T17-06-29.601004.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_12_16T17_06_29.601004", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T17-06-29.601004.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T17-06-29.601004.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_12_16T17_06_29.601004", "path": ["**/details_harness|hendrycksTest-virology|5_2023-12-16T17-06-29.601004.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-12-16T17-06-29.601004.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_12_16T17_06_29.601004", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-12-16T17-06-29.601004.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-12-16T17-06-29.601004.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_12_16T17_06_29.601004", "path": ["**/details_harness|truthfulqa:mc|0_2023-12-16T17-06-29.601004.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-12-16T17-06-29.601004.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_12_16T17_06_29.601004", "path": ["**/details_harness|winogrande|5_2023-12-16T17-06-29.601004.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-12-16T17-06-29.601004.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_12_16T17_06_29.601004", "path": ["results_2023-12-16T17-06-29.601004.parquet"]}, {"split": "latest", "path": 
["results_2023-12-16T17-06-29.601004.parquet"]}]}]}
2023-12-16T17:10:00+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of mncai/mistral-7b-dpo-v5 Dataset automatically created during the evaluation run of model mncai/mistral-7b-dpo-v5 on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following (see the loading sketch after this card): ## Latest results These are the latest results from run 2023-12-16T17:06:29.601004 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
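The condensed card above says "you can for instance do the following" but the original loading snippet was stripped during processing; the block below is only a minimal sketch of that step. The repository id is an assumption inferred from the `open-llm-leaderboard/details_<org>__<model>` naming pattern used by the other cards in this dump, and `harness_winogrande_5` is one of the per-task configurations listed for this run.

```python
from datasets import load_dataset

# Assumed repository id, following the naming pattern of the other
# evaluation-detail datasets in this dump; adjust it if the real id differs.
repo_id = "open-llm-leaderboard/details_mncai__mistral-7b-dpo-v5"

# The "train" split always points at the latest results, per the card text.
data = load_dataset(repo_id, "harness_winogrande_5", split="train")
print(data)
```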
[ "# Dataset Card for Evaluation run of mncai/mistral-7b-dpo-v5\n\n\n\nDataset automatically created during the evaluation run of model mncai/mistral-7b-dpo-v5 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-12-16T17:06:29.601004(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of mncai/mistral-7b-dpo-v5\n\n\n\nDataset automatically created during the evaluation run of model mncai/mistral-7b-dpo-v5 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-12-16T17:06:29.601004(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 189, 66, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of mncai/mistral-7b-dpo-v5\n\n\n\nDataset automatically created during the evaluation run of model mncai/mistral-7b-dpo-v5 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-12-16T17:06:29.601004(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]## Dataset Card Authors [optional]## Dataset Card Contact" ]
2e2d1dc8ec0e3395f0c2776ef507ae83690b1349
# Dataset Card for Evaluation run of vihangd/dopeyshearedplats-2.7b-v1 <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [vihangd/dopeyshearedplats-2.7b-v1](https://huggingface.co/vihangd/dopeyshearedplats-2.7b-v1) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_vihangd__dopeyshearedplats-2.7b-v1", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-12-16T17:10:33.730644](https://huggingface.co/datasets/open-llm-leaderboard/details_vihangd__dopeyshearedplats-2.7b-v1/blob/main/results_2023-12-16T17-10-33.730644.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.29757038455080786, "acc_stderr": 0.032057229895416275, "acc_norm": 0.30080870787584507, "acc_norm_stderr": 0.03287733985701745, "mc1": 0.28151774785801714, "mc1_stderr": 0.01574402724825605, "mc2": 0.44123500676119165, "mc2_stderr": 0.015794257230996155 }, "harness|arc:challenge|25": { "acc": 0.4121160409556314, "acc_stderr": 0.014383915302225396, "acc_norm": 0.46075085324232085, "acc_norm_stderr": 0.014566303676636588 }, "harness|hellaswag|10": { "acc": 0.5739892451702848, "acc_stderr": 0.0049348468098272, "acc_norm": 0.7517426807408882, "acc_norm_stderr": 0.00431118988223835 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.37037037037037035, "acc_stderr": 0.04171654161354543, "acc_norm": 0.37037037037037035, "acc_norm_stderr": 0.04171654161354543 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.23684210526315788, "acc_stderr": 0.034597776068105365, "acc_norm": 0.23684210526315788, "acc_norm_stderr": 0.034597776068105365 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.35, "acc_stderr": 0.0479372485441102, "acc_norm": 0.35, "acc_norm_stderr": 0.0479372485441102 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.30566037735849055, "acc_stderr": 0.028353298073322663, "acc_norm": 0.30566037735849055, "acc_norm_stderr": 0.028353298073322663 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.3402777777777778, "acc_stderr": 0.03962135573486219, "acc_norm": 0.3402777777777778, "acc_norm_stderr": 0.03962135573486219 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.25, "acc_stderr": 0.04351941398892446, "acc_norm": 0.25, "acc_norm_stderr": 0.04351941398892446 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.25, "acc_stderr": 0.04351941398892446, "acc_norm": 0.25, "acc_norm_stderr": 0.04351941398892446 },
"harness|hendrycksTest-college_mathematics|5": { "acc": 0.26, "acc_stderr": 0.04408440022768078, "acc_norm": 0.26, "acc_norm_stderr": 0.04408440022768078 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.23121387283236994, "acc_stderr": 0.032147373020294696, "acc_norm": 0.23121387283236994, "acc_norm_stderr": 0.032147373020294696 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.1568627450980392, "acc_stderr": 0.036186648199362466, "acc_norm": 0.1568627450980392, "acc_norm_stderr": 0.036186648199362466 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.42, "acc_stderr": 0.04960449637488584, "acc_norm": 0.42, "acc_norm_stderr": 0.04960449637488584 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.3148936170212766, "acc_stderr": 0.03036358219723817, "acc_norm": 0.3148936170212766, "acc_norm_stderr": 0.03036358219723817 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.22807017543859648, "acc_stderr": 0.03947152782669415, "acc_norm": 0.22807017543859648, "acc_norm_stderr": 0.03947152782669415 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.2896551724137931, "acc_stderr": 0.037800192304380135, "acc_norm": 0.2896551724137931, "acc_norm_stderr": 0.037800192304380135 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.26455026455026454, "acc_stderr": 0.022717467897708624, "acc_norm": 0.26455026455026454, "acc_norm_stderr": 0.022717467897708624 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.18253968253968253, "acc_stderr": 0.03455071019102148, "acc_norm": 0.18253968253968253, "acc_norm_stderr": 0.03455071019102148 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.27, "acc_stderr": 0.0446196043338474, "acc_norm": 0.27, "acc_norm_stderr": 0.0446196043338474 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.27741935483870966, "acc_stderr": 0.025470196835900055, "acc_norm": 0.27741935483870966, "acc_norm_stderr": 0.025470196835900055 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.27586206896551724, "acc_stderr": 0.031447125816782405, "acc_norm": 0.27586206896551724, "acc_norm_stderr": 0.031447125816782405 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.28, "acc_stderr": 0.04512608598542127, "acc_norm": 0.28, "acc_norm_stderr": 0.04512608598542127 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.296969696969697, "acc_stderr": 0.035679697722680474, "acc_norm": 0.296969696969697, "acc_norm_stderr": 0.035679697722680474 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.29292929292929293, "acc_stderr": 0.032424979581788166, "acc_norm": 0.29292929292929293, "acc_norm_stderr": 0.032424979581788166 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.29533678756476683, "acc_stderr": 0.03292296639155141, "acc_norm": 0.29533678756476683, "acc_norm_stderr": 0.03292296639155141 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.2641025641025641, "acc_stderr": 0.02235219373745326, "acc_norm": 0.2641025641025641, "acc_norm_stderr": 0.02235219373745326 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.24444444444444444, "acc_stderr": 0.02620276653465215, "acc_norm": 0.24444444444444444, "acc_norm_stderr": 0.02620276653465215 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.2689075630252101, "acc_stderr": 0.028801392193631273, "acc_norm": 0.2689075630252101, "acc_norm_stderr": 0.028801392193631273 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.18543046357615894, 
"acc_stderr": 0.031732843842942865, "acc_norm": 0.18543046357615894, "acc_norm_stderr": 0.031732843842942865 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.28440366972477066, "acc_stderr": 0.019342036587702602, "acc_norm": 0.28440366972477066, "acc_norm_stderr": 0.019342036587702602 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.21296296296296297, "acc_stderr": 0.02792096314799366, "acc_norm": 0.21296296296296297, "acc_norm_stderr": 0.02792096314799366 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.27450980392156865, "acc_stderr": 0.031321798030832904, "acc_norm": 0.27450980392156865, "acc_norm_stderr": 0.031321798030832904 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.29535864978902954, "acc_stderr": 0.029696338713422882, "acc_norm": 0.29535864978902954, "acc_norm_stderr": 0.029696338713422882 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.27802690582959644, "acc_stderr": 0.030069584874494053, "acc_norm": 0.27802690582959644, "acc_norm_stderr": 0.030069584874494053 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.2824427480916031, "acc_stderr": 0.03948406125768361, "acc_norm": 0.2824427480916031, "acc_norm_stderr": 0.03948406125768361 }, "harness|hendrycksTest-international_law|5": { "acc": 0.4793388429752066, "acc_stderr": 0.04560456086387235, "acc_norm": 0.4793388429752066, "acc_norm_stderr": 0.04560456086387235 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.3333333333333333, "acc_stderr": 0.04557239513497751, "acc_norm": 0.3333333333333333, "acc_norm_stderr": 0.04557239513497751 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.3558282208588957, "acc_stderr": 0.03761521380046734, "acc_norm": 0.3558282208588957, "acc_norm_stderr": 0.03761521380046734 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.29464285714285715, "acc_stderr": 0.0432704093257873, "acc_norm": 0.29464285714285715, "acc_norm_stderr": 0.0432704093257873 }, "harness|hendrycksTest-management|5": { "acc": 0.27184466019417475, "acc_stderr": 0.044052680241409216, "acc_norm": 0.27184466019417475, "acc_norm_stderr": 0.044052680241409216 }, "harness|hendrycksTest-marketing|5": { "acc": 0.32905982905982906, "acc_stderr": 0.030782321577688156, "acc_norm": 0.32905982905982906, "acc_norm_stderr": 0.030782321577688156 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.38697318007662834, "acc_stderr": 0.017417138059440136, "acc_norm": 0.38697318007662834, "acc_norm_stderr": 0.017417138059440136 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.28901734104046245, "acc_stderr": 0.02440517393578323, "acc_norm": 0.28901734104046245, "acc_norm_stderr": 0.02440517393578323 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.2547486033519553, "acc_stderr": 0.014572650383409155, "acc_norm": 0.2547486033519553, "acc_norm_stderr": 0.014572650383409155 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.28104575163398693, "acc_stderr": 0.02573885479781873, "acc_norm": 0.28104575163398693, "acc_norm_stderr": 0.02573885479781873 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.31189710610932475, "acc_stderr": 0.02631185807185415, "acc_norm": 0.31189710610932475, "acc_norm_stderr": 0.02631185807185415 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.345679012345679, "acc_stderr": 0.026462487777001886, "acc_norm": 0.345679012345679, "acc_norm_stderr": 
0.026462487777001886 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.2978723404255319, "acc_stderr": 0.027281608344469414, "acc_norm": 0.2978723404255319, "acc_norm_stderr": 0.027281608344469414 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.27835723598435463, "acc_stderr": 0.011446990197380985, "acc_norm": 0.27835723598435463, "acc_norm_stderr": 0.011446990197380985 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.17647058823529413, "acc_stderr": 0.02315746830855935, "acc_norm": 0.17647058823529413, "acc_norm_stderr": 0.02315746830855935 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.3104575163398693, "acc_stderr": 0.018718067052623216, "acc_norm": 0.3104575163398693, "acc_norm_stderr": 0.018718067052623216 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.2909090909090909, "acc_stderr": 0.04350271442923243, "acc_norm": 0.2909090909090909, "acc_norm_stderr": 0.04350271442923243 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.2571428571428571, "acc_stderr": 0.02797982353874455, "acc_norm": 0.2571428571428571, "acc_norm_stderr": 0.02797982353874455 }, "harness|hendrycksTest-sociology|5": { "acc": 0.3333333333333333, "acc_stderr": 0.033333333333333354, "acc_norm": 0.3333333333333333, "acc_norm_stderr": 0.033333333333333354 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.36, "acc_stderr": 0.04824181513244218, "acc_norm": 0.36, "acc_norm_stderr": 0.04824181513244218 }, "harness|hendrycksTest-virology|5": { "acc": 0.27710843373493976, "acc_stderr": 0.03484331592680588, "acc_norm": 0.27710843373493976, "acc_norm_stderr": 0.03484331592680588 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.38011695906432746, "acc_stderr": 0.03722965741385539, "acc_norm": 0.38011695906432746, "acc_norm_stderr": 0.03722965741385539 }, "harness|truthfulqa:mc|0": { "mc1": 0.28151774785801714, "mc1_stderr": 0.01574402724825605, "mc2": 0.44123500676119165, "mc2_stderr": 0.015794257230996155 }, "harness|winogrande|5": { "acc": 0.6266771902131019, "acc_stderr": 0.013594002763035518 }, "harness|gsm8k|5": { "acc": 0.0037907505686125853, "acc_stderr": 0.0016927007401501904 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. 
news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
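The "Dataset Structure" and "Direct Use" sections above are still placeholders, so here is a small sketch of how one might inspect a per-task split of this run without assuming any particular column names; it relies only on the repository id, the `harness_winogrande_5` configuration, and the "latest" split that already appear in this card.

```python
from datasets import load_dataset

# Load one per-task configuration of the evaluation details; "latest" mirrors
# the most recent run, as described in the card above.
details = load_dataset(
    "open-llm-leaderboard/details_vihangd__dopeyshearedplats-2.7b-v1",
    "harness_winogrande_5",
    split="latest",
)

print(details.features)  # discover whatever columns the parquet file exposes
print(details[0])        # look at the first evaluated example
```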
open-llm-leaderboard/details_vihangd__dopeyshearedplats-2.7b-v1
[ "region:us" ]
2023-12-16T17:13:30+00:00
{"pretty_name": "Evaluation run of vihangd/dopeyshearedplats-2.7b-v1", "dataset_summary": "Dataset automatically created during the evaluation run of model [vihangd/dopeyshearedplats-2.7b-v1](https://huggingface.co/vihangd/dopeyshearedplats-2.7b-v1) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_vihangd__dopeyshearedplats-2.7b-v1\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-12-16T17:10:33.730644](https://huggingface.co/datasets/open-llm-leaderboard/details_vihangd__dopeyshearedplats-2.7b-v1/blob/main/results_2023-12-16T17-10-33.730644.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.29757038455080786,\n \"acc_stderr\": 0.032057229895416275,\n \"acc_norm\": 0.30080870787584507,\n \"acc_norm_stderr\": 0.03287733985701745,\n \"mc1\": 0.28151774785801714,\n \"mc1_stderr\": 0.01574402724825605,\n \"mc2\": 0.44123500676119165,\n \"mc2_stderr\": 0.015794257230996155\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.4121160409556314,\n \"acc_stderr\": 0.014383915302225396,\n \"acc_norm\": 0.46075085324232085,\n \"acc_norm_stderr\": 0.014566303676636588\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.5739892451702848,\n \"acc_stderr\": 0.0049348468098272,\n \"acc_norm\": 0.7517426807408882,\n \"acc_norm_stderr\": 0.00431118988223835\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.31,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.31,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.37037037037037035,\n \"acc_stderr\": 0.04171654161354543,\n \"acc_norm\": 0.37037037037037035,\n \"acc_norm_stderr\": 0.04171654161354543\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.23684210526315788,\n \"acc_stderr\": 0.034597776068105365,\n \"acc_norm\": 0.23684210526315788,\n \"acc_norm_stderr\": 0.034597776068105365\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.35,\n \"acc_stderr\": 0.0479372485441102,\n \"acc_norm\": 0.35,\n \"acc_norm_stderr\": 0.0479372485441102\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.30566037735849055,\n \"acc_stderr\": 0.028353298073322663,\n \"acc_norm\": 0.30566037735849055,\n \"acc_norm_stderr\": 0.028353298073322663\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.3402777777777778,\n \"acc_stderr\": 0.03962135573486219,\n \"acc_norm\": 0.3402777777777778,\n \"acc_norm_stderr\": 0.03962135573486219\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.25,\n \"acc_stderr\": 
0.04351941398892446,\n \"acc_norm\": 0.25,\n \"acc_norm_stderr\": 0.04351941398892446\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.25,\n \"acc_stderr\": 0.04351941398892446,\n \"acc_norm\": 0.25,\n \"acc_norm_stderr\": 0.04351941398892446\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.26,\n \"acc_stderr\": 0.04408440022768078,\n \"acc_norm\": 0.26,\n \"acc_norm_stderr\": 0.04408440022768078\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.23121387283236994,\n \"acc_stderr\": 0.032147373020294696,\n \"acc_norm\": 0.23121387283236994,\n \"acc_norm_stderr\": 0.032147373020294696\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.1568627450980392,\n \"acc_stderr\": 0.036186648199362466,\n \"acc_norm\": 0.1568627450980392,\n \"acc_norm_stderr\": 0.036186648199362466\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.42,\n \"acc_stderr\": 0.04960449637488584,\n \"acc_norm\": 0.42,\n \"acc_norm_stderr\": 0.04960449637488584\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.3148936170212766,\n \"acc_stderr\": 0.03036358219723817,\n \"acc_norm\": 0.3148936170212766,\n \"acc_norm_stderr\": 0.03036358219723817\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.22807017543859648,\n \"acc_stderr\": 0.03947152782669415,\n \"acc_norm\": 0.22807017543859648,\n \"acc_norm_stderr\": 0.03947152782669415\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.2896551724137931,\n \"acc_stderr\": 0.037800192304380135,\n \"acc_norm\": 0.2896551724137931,\n \"acc_norm_stderr\": 0.037800192304380135\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.26455026455026454,\n \"acc_stderr\": 0.022717467897708624,\n \"acc_norm\": 0.26455026455026454,\n \"acc_norm_stderr\": 0.022717467897708624\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.18253968253968253,\n \"acc_stderr\": 0.03455071019102148,\n \"acc_norm\": 0.18253968253968253,\n \"acc_norm_stderr\": 0.03455071019102148\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.27,\n \"acc_stderr\": 0.0446196043338474,\n \"acc_norm\": 0.27,\n \"acc_norm_stderr\": 0.0446196043338474\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.27741935483870966,\n \"acc_stderr\": 0.025470196835900055,\n \"acc_norm\": 0.27741935483870966,\n \"acc_norm_stderr\": 0.025470196835900055\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.27586206896551724,\n \"acc_stderr\": 0.031447125816782405,\n \"acc_norm\": 0.27586206896551724,\n \"acc_norm_stderr\": 0.031447125816782405\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.28,\n \"acc_stderr\": 0.04512608598542127,\n \"acc_norm\": 0.28,\n \"acc_norm_stderr\": 0.04512608598542127\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.296969696969697,\n \"acc_stderr\": 0.035679697722680474,\n \"acc_norm\": 0.296969696969697,\n \"acc_norm_stderr\": 0.035679697722680474\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.29292929292929293,\n \"acc_stderr\": 0.032424979581788166,\n \"acc_norm\": 0.29292929292929293,\n \"acc_norm_stderr\": 0.032424979581788166\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.29533678756476683,\n \"acc_stderr\": 0.03292296639155141,\n \"acc_norm\": 0.29533678756476683,\n \"acc_norm_stderr\": 0.03292296639155141\n },\n 
\"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.2641025641025641,\n \"acc_stderr\": 0.02235219373745326,\n \"acc_norm\": 0.2641025641025641,\n \"acc_norm_stderr\": 0.02235219373745326\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.24444444444444444,\n \"acc_stderr\": 0.02620276653465215,\n \"acc_norm\": 0.24444444444444444,\n \"acc_norm_stderr\": 0.02620276653465215\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.2689075630252101,\n \"acc_stderr\": 0.028801392193631273,\n \"acc_norm\": 0.2689075630252101,\n \"acc_norm_stderr\": 0.028801392193631273\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.18543046357615894,\n \"acc_stderr\": 0.031732843842942865,\n \"acc_norm\": 0.18543046357615894,\n \"acc_norm_stderr\": 0.031732843842942865\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.28440366972477066,\n \"acc_stderr\": 0.019342036587702602,\n \"acc_norm\": 0.28440366972477066,\n \"acc_norm_stderr\": 0.019342036587702602\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.21296296296296297,\n \"acc_stderr\": 0.02792096314799366,\n \"acc_norm\": 0.21296296296296297,\n \"acc_norm_stderr\": 0.02792096314799366\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.27450980392156865,\n \"acc_stderr\": 0.031321798030832904,\n \"acc_norm\": 0.27450980392156865,\n \"acc_norm_stderr\": 0.031321798030832904\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.29535864978902954,\n \"acc_stderr\": 0.029696338713422882,\n \"acc_norm\": 0.29535864978902954,\n \"acc_norm_stderr\": 0.029696338713422882\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.27802690582959644,\n \"acc_stderr\": 0.030069584874494053,\n \"acc_norm\": 0.27802690582959644,\n \"acc_norm_stderr\": 0.030069584874494053\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.2824427480916031,\n \"acc_stderr\": 0.03948406125768361,\n \"acc_norm\": 0.2824427480916031,\n \"acc_norm_stderr\": 0.03948406125768361\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.4793388429752066,\n \"acc_stderr\": 0.04560456086387235,\n \"acc_norm\": 0.4793388429752066,\n \"acc_norm_stderr\": 0.04560456086387235\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.3333333333333333,\n \"acc_stderr\": 0.04557239513497751,\n \"acc_norm\": 0.3333333333333333,\n \"acc_norm_stderr\": 0.04557239513497751\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.3558282208588957,\n \"acc_stderr\": 0.03761521380046734,\n \"acc_norm\": 0.3558282208588957,\n \"acc_norm_stderr\": 0.03761521380046734\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.29464285714285715,\n \"acc_stderr\": 0.0432704093257873,\n \"acc_norm\": 0.29464285714285715,\n \"acc_norm_stderr\": 0.0432704093257873\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.27184466019417475,\n \"acc_stderr\": 0.044052680241409216,\n \"acc_norm\": 0.27184466019417475,\n \"acc_norm_stderr\": 0.044052680241409216\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.32905982905982906,\n \"acc_stderr\": 0.030782321577688156,\n \"acc_norm\": 0.32905982905982906,\n \"acc_norm_stderr\": 0.030782321577688156\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.3,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.3,\n \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-miscellaneous|5\": 
{\n \"acc\": 0.38697318007662834,\n \"acc_stderr\": 0.017417138059440136,\n \"acc_norm\": 0.38697318007662834,\n \"acc_norm_stderr\": 0.017417138059440136\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.28901734104046245,\n \"acc_stderr\": 0.02440517393578323,\n \"acc_norm\": 0.28901734104046245,\n \"acc_norm_stderr\": 0.02440517393578323\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.2547486033519553,\n \"acc_stderr\": 0.014572650383409155,\n \"acc_norm\": 0.2547486033519553,\n \"acc_norm_stderr\": 0.014572650383409155\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.28104575163398693,\n \"acc_stderr\": 0.02573885479781873,\n \"acc_norm\": 0.28104575163398693,\n \"acc_norm_stderr\": 0.02573885479781873\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.31189710610932475,\n \"acc_stderr\": 0.02631185807185415,\n \"acc_norm\": 0.31189710610932475,\n \"acc_norm_stderr\": 0.02631185807185415\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.345679012345679,\n \"acc_stderr\": 0.026462487777001886,\n \"acc_norm\": 0.345679012345679,\n \"acc_norm_stderr\": 0.026462487777001886\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.2978723404255319,\n \"acc_stderr\": 0.027281608344469414,\n \"acc_norm\": 0.2978723404255319,\n \"acc_norm_stderr\": 0.027281608344469414\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.27835723598435463,\n \"acc_stderr\": 0.011446990197380985,\n \"acc_norm\": 0.27835723598435463,\n \"acc_norm_stderr\": 0.011446990197380985\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.17647058823529413,\n \"acc_stderr\": 0.02315746830855935,\n \"acc_norm\": 0.17647058823529413,\n \"acc_norm_stderr\": 0.02315746830855935\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.3104575163398693,\n \"acc_stderr\": 0.018718067052623216,\n \"acc_norm\": 0.3104575163398693,\n \"acc_norm_stderr\": 0.018718067052623216\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.2909090909090909,\n \"acc_stderr\": 0.04350271442923243,\n \"acc_norm\": 0.2909090909090909,\n \"acc_norm_stderr\": 0.04350271442923243\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.2571428571428571,\n \"acc_stderr\": 0.02797982353874455,\n \"acc_norm\": 0.2571428571428571,\n \"acc_norm_stderr\": 0.02797982353874455\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.3333333333333333,\n \"acc_stderr\": 0.033333333333333354,\n \"acc_norm\": 0.3333333333333333,\n \"acc_norm_stderr\": 0.033333333333333354\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.36,\n \"acc_stderr\": 0.04824181513244218,\n \"acc_norm\": 0.36,\n \"acc_norm_stderr\": 0.04824181513244218\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.27710843373493976,\n \"acc_stderr\": 0.03484331592680588,\n \"acc_norm\": 0.27710843373493976,\n \"acc_norm_stderr\": 0.03484331592680588\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.38011695906432746,\n \"acc_stderr\": 0.03722965741385539,\n \"acc_norm\": 0.38011695906432746,\n \"acc_norm_stderr\": 0.03722965741385539\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.28151774785801714,\n \"mc1_stderr\": 0.01574402724825605,\n \"mc2\": 0.44123500676119165,\n \"mc2_stderr\": 0.015794257230996155\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.6266771902131019,\n \"acc_stderr\": 0.013594002763035518\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.0037907505686125853,\n 
\"acc_stderr\": 0.0016927007401501904\n }\n}\n```", "repo_url": "https://huggingface.co/vihangd/dopeyshearedplats-2.7b-v1", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_12_16T17_10_33.730644", "path": ["**/details_harness|arc:challenge|25_2023-12-16T17-10-33.730644.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-12-16T17-10-33.730644.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_12_16T17_10_33.730644", "path": ["**/details_harness|gsm8k|5_2023-12-16T17-10-33.730644.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-12-16T17-10-33.730644.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_12_16T17_10_33.730644", "path": ["**/details_harness|hellaswag|10_2023-12-16T17-10-33.730644.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-12-16T17-10-33.730644.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_12_16T17_10_33.730644", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T17-10-33.730644.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-12-16T17-10-33.730644.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-12-16T17-10-33.730644.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T17-10-33.730644.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T17-10-33.730644.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-12-16T17-10-33.730644.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T17-10-33.730644.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T17-10-33.730644.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T17-10-33.730644.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T17-10-33.730644.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-12-16T17-10-33.730644.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-12-16T17-10-33.730644.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T17-10-33.730644.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-12-16T17-10-33.730644.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T17-10-33.730644.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T17-10-33.730644.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T17-10-33.730644.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-12-16T17-10-33.730644.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T17-10-33.730644.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T17-10-33.730644.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T17-10-33.730644.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T17-10-33.730644.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T17-10-33.730644.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T17-10-33.730644.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T17-10-33.730644.parquet", 
"**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T17-10-33.730644.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T17-10-33.730644.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T17-10-33.730644.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T17-10-33.730644.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T17-10-33.730644.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T17-10-33.730644.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T17-10-33.730644.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-12-16T17-10-33.730644.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T17-10-33.730644.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-12-16T17-10-33.730644.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T17-10-33.730644.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T17-10-33.730644.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T17-10-33.730644.parquet", "**/details_harness|hendrycksTest-management|5_2023-12-16T17-10-33.730644.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-12-16T17-10-33.730644.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T17-10-33.730644.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T17-10-33.730644.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T17-10-33.730644.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T17-10-33.730644.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-12-16T17-10-33.730644.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-12-16T17-10-33.730644.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-12-16T17-10-33.730644.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T17-10-33.730644.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-12-16T17-10-33.730644.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T17-10-33.730644.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T17-10-33.730644.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-12-16T17-10-33.730644.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-12-16T17-10-33.730644.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-12-16T17-10-33.730644.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T17-10-33.730644.parquet", "**/details_harness|hendrycksTest-virology|5_2023-12-16T17-10-33.730644.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-12-16T17-10-33.730644.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T17-10-33.730644.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-12-16T17-10-33.730644.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-12-16T17-10-33.730644.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T17-10-33.730644.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T17-10-33.730644.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-12-16T17-10-33.730644.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T17-10-33.730644.parquet", 
"**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T17-10-33.730644.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T17-10-33.730644.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T17-10-33.730644.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-12-16T17-10-33.730644.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-12-16T17-10-33.730644.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T17-10-33.730644.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-12-16T17-10-33.730644.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T17-10-33.730644.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T17-10-33.730644.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T17-10-33.730644.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-12-16T17-10-33.730644.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T17-10-33.730644.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T17-10-33.730644.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T17-10-33.730644.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T17-10-33.730644.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T17-10-33.730644.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T17-10-33.730644.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T17-10-33.730644.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T17-10-33.730644.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T17-10-33.730644.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T17-10-33.730644.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T17-10-33.730644.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T17-10-33.730644.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T17-10-33.730644.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T17-10-33.730644.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-12-16T17-10-33.730644.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T17-10-33.730644.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-12-16T17-10-33.730644.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T17-10-33.730644.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T17-10-33.730644.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T17-10-33.730644.parquet", "**/details_harness|hendrycksTest-management|5_2023-12-16T17-10-33.730644.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-12-16T17-10-33.730644.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T17-10-33.730644.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T17-10-33.730644.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T17-10-33.730644.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T17-10-33.730644.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-12-16T17-10-33.730644.parquet", 
"**/details_harness|hendrycksTest-philosophy|5_2023-12-16T17-10-33.730644.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-12-16T17-10-33.730644.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T17-10-33.730644.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-12-16T17-10-33.730644.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T17-10-33.730644.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T17-10-33.730644.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-12-16T17-10-33.730644.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-12-16T17-10-33.730644.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-12-16T17-10-33.730644.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T17-10-33.730644.parquet", "**/details_harness|hendrycksTest-virology|5_2023-12-16T17-10-33.730644.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-12-16T17-10-33.730644.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_12_16T17_10_33.730644", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T17-10-33.730644.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T17-10-33.730644.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_12_16T17_10_33.730644", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-12-16T17-10-33.730644.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-12-16T17-10-33.730644.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_12_16T17_10_33.730644", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-12-16T17-10-33.730644.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-12-16T17-10-33.730644.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_12_16T17_10_33.730644", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T17-10-33.730644.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T17-10-33.730644.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_12_16T17_10_33.730644", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T17-10-33.730644.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T17-10-33.730644.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_12_16T17_10_33.730644", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-12-16T17-10-33.730644.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-12-16T17-10-33.730644.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_12_16T17_10_33.730644", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T17-10-33.730644.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T17-10-33.730644.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_12_16T17_10_33.730644", "path": 
["**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T17-10-33.730644.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T17-10-33.730644.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_12_16T17_10_33.730644", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T17-10-33.730644.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T17-10-33.730644.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_12_16T17_10_33.730644", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T17-10-33.730644.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T17-10-33.730644.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_12_16T17_10_33.730644", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-12-16T17-10-33.730644.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-12-16T17-10-33.730644.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_12_16T17_10_33.730644", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-12-16T17-10-33.730644.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-12-16T17-10-33.730644.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_12_16T17_10_33.730644", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T17-10-33.730644.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T17-10-33.730644.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_12_16T17_10_33.730644", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-12-16T17-10-33.730644.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-12-16T17-10-33.730644.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_12_16T17_10_33.730644", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T17-10-33.730644.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T17-10-33.730644.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_12_16T17_10_33.730644", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T17-10-33.730644.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T17-10-33.730644.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_12_16T17_10_33.730644", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T17-10-33.730644.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T17-10-33.730644.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_12_16T17_10_33.730644", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-12-16T17-10-33.730644.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-global_facts|5_2023-12-16T17-10-33.730644.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_12_16T17_10_33.730644", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T17-10-33.730644.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T17-10-33.730644.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_12_16T17_10_33.730644", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T17-10-33.730644.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T17-10-33.730644.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_12_16T17_10_33.730644", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T17-10-33.730644.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T17-10-33.730644.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_12_16T17_10_33.730644", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T17-10-33.730644.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T17-10-33.730644.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_12_16T17_10_33.730644", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T17-10-33.730644.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T17-10-33.730644.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_12_16T17_10_33.730644", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T17-10-33.730644.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T17-10-33.730644.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_12_16T17_10_33.730644", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T17-10-33.730644.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T17-10-33.730644.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_12_16T17_10_33.730644", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T17-10-33.730644.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T17-10-33.730644.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_12_16T17_10_33.730644", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T17-10-33.730644.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T17-10-33.730644.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_12_16T17_10_33.730644", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T17-10-33.730644.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T17-10-33.730644.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_12_16T17_10_33.730644", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T17-10-33.730644.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T17-10-33.730644.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_12_16T17_10_33.730644", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T17-10-33.730644.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T17-10-33.730644.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_12_16T17_10_33.730644", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T17-10-33.730644.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T17-10-33.730644.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_12_16T17_10_33.730644", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T17-10-33.730644.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T17-10-33.730644.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_12_16T17_10_33.730644", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-12-16T17-10-33.730644.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-12-16T17-10-33.730644.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_12_16T17_10_33.730644", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T17-10-33.730644.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T17-10-33.730644.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_12_16T17_10_33.730644", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-12-16T17-10-33.730644.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-12-16T17-10-33.730644.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_12_16T17_10_33.730644", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T17-10-33.730644.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T17-10-33.730644.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_12_16T17_10_33.730644", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T17-10-33.730644.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T17-10-33.730644.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_12_16T17_10_33.730644", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T17-10-33.730644.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T17-10-33.730644.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_12_16T17_10_33.730644", "path": ["**/details_harness|hendrycksTest-management|5_2023-12-16T17-10-33.730644.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-12-16T17-10-33.730644.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_12_16T17_10_33.730644", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-12-16T17-10-33.730644.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-12-16T17-10-33.730644.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_12_16T17_10_33.730644", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T17-10-33.730644.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T17-10-33.730644.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_12_16T17_10_33.730644", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T17-10-33.730644.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T17-10-33.730644.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_12_16T17_10_33.730644", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T17-10-33.730644.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T17-10-33.730644.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_12_16T17_10_33.730644", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T17-10-33.730644.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T17-10-33.730644.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_12_16T17_10_33.730644", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-12-16T17-10-33.730644.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-12-16T17-10-33.730644.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_12_16T17_10_33.730644", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-12-16T17-10-33.730644.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-12-16T17-10-33.730644.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_12_16T17_10_33.730644", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-12-16T17-10-33.730644.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-12-16T17-10-33.730644.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_12_16T17_10_33.730644", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T17-10-33.730644.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T17-10-33.730644.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_12_16T17_10_33.730644", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2023-12-16T17-10-33.730644.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-12-16T17-10-33.730644.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_12_16T17_10_33.730644", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T17-10-33.730644.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T17-10-33.730644.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_12_16T17_10_33.730644", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T17-10-33.730644.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T17-10-33.730644.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_12_16T17_10_33.730644", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-12-16T17-10-33.730644.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-12-16T17-10-33.730644.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_12_16T17_10_33.730644", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-12-16T17-10-33.730644.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-12-16T17-10-33.730644.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_12_16T17_10_33.730644", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-12-16T17-10-33.730644.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-12-16T17-10-33.730644.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_12_16T17_10_33.730644", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T17-10-33.730644.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T17-10-33.730644.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_12_16T17_10_33.730644", "path": ["**/details_harness|hendrycksTest-virology|5_2023-12-16T17-10-33.730644.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-12-16T17-10-33.730644.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_12_16T17_10_33.730644", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-12-16T17-10-33.730644.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-12-16T17-10-33.730644.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_12_16T17_10_33.730644", "path": ["**/details_harness|truthfulqa:mc|0_2023-12-16T17-10-33.730644.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-12-16T17-10-33.730644.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_12_16T17_10_33.730644", "path": ["**/details_harness|winogrande|5_2023-12-16T17-10-33.730644.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-12-16T17-10-33.730644.parquet"]}]}, {"config_name": "results", "data_files": [{"split": 
"2023_12_16T17_10_33.730644", "path": ["results_2023-12-16T17-10-33.730644.parquet"]}, {"split": "latest", "path": ["results_2023-12-16T17-10-33.730644.parquet"]}]}]}
2023-12-16T17:14:09+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of vihangd/dopeyshearedplats-2.7b-v1 Dataset automatically created during the evaluation run of model vihangd/dopeyshearedplats-2.7b-v1 on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following (a loading sketch is given just after this card text): ## Latest results These are the latest results from run 2023-12-16T17:10:33.730644 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
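A minimal loading sketch for this card. The repository id below is a hypothetical, inferred from the usual `open-llm-leaderboard/details_<org>__<model>` naming of these evaluation dumps rather than stated in the card text; the `harness_winogrande_5` config name and the `train` split are taken from the config metadata listed above.

```python
from datasets import load_dataset

# Hypothetical repository id, assuming the standard open-llm-leaderboard
# "details_<org>__<model>" naming scheme; the config name comes from the
# config list in this card's metadata.
data = load_dataset(
    "open-llm-leaderboard/details_vihangd__dopeyshearedplats-2.7b-v1",
    "harness_winogrande_5",
    split="train",
)
print(data)
```

The same pattern applies to any of the per-task configs listed above (for example `harness_gsm8k_5` or the individual `harness_hendrycksTest_*_5` configs), and the `latest` split always points at the most recent run.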
[ "# Dataset Card for Evaluation run of vihangd/dopeyshearedplats-2.7b-v1\n\n\n\nDataset automatically created during the evaluation run of model vihangd/dopeyshearedplats-2.7b-v1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-12-16T17:10:33.730644(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of vihangd/dopeyshearedplats-2.7b-v1\n\n\n\nDataset automatically created during the evaluation run of model vihangd/dopeyshearedplats-2.7b-v1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-12-16T17:10:33.730644(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 195, 67, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of vihangd/dopeyshearedplats-2.7b-v1\n\n\n\nDataset automatically created during the evaluation run of model vihangd/dopeyshearedplats-2.7b-v1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-12-16T17:10:33.730644(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]" ]
0992a8422ef4c466f3c219a52fcd5b90301edfe0
# Вопросы к психологу и ответы от психологов с сайта [psiholog.ru](https://www.psiholog.ru) Данные актуальны на 2023-12-16. Парсер, с помощью которого получили датасет, можно найти в [этом репозитории](https://github.com/rogozinushka/psychologist_answers_parser) Датафрейм имеет такую структуру: - url - ссылка на вопрос - question_name - заголовок вопроса - question_body - подробный вопрос - answers - ответы психологов |url|question_name|question_body|answers| |---|---|---|---| |https://psiholog.ru/vopros/89|Как избавиться от страха и депрессии после цыганского гипноза?|спрашивает: Марина (Казань)Здравствуйте!Вчера подверглась цыганскому гипнозу..отдала им деньги,не понимаю как.произошло всё на работе(работаю продавцом не первый год).теперь остался только страх и опустошение..не знаю,как с этим справиться.ощущение,что схожу с ума и что все вокруг меня теперь считают немного сумашедшей.я как загнанная в клетку и до сих пор ощущаю присутствие этих цыганок(они работали вдвоём)|['Добрый вечер, Марина!<br>Ситация, произошедшая с Вами очень не приятная. И усугубляется тем, что Вы, видимо остались еще и должны? Если состояние еще актуально, то хорошо бы пройти антикризисную психотерапию.<br>Для информации: Цыганский гипноз хорошо описан как феномен у Милтона Эриксона (эрисонианский или эриксоновский гипноз) и осуществляется в рамках НЛП. И то, что с Вами произошло - это чистого вида манипуляция. Вам хорошо бы познать методы манипуляций, чтобы впредь чувствовать их и не поддаваться.<br>С позитивной точки зрения ситуацию лучше воспринять как урок. Кстати, эту технику (не всегда в чистом виде) используют в тренингах продаж и в профессиональном плане Вам эти знания могут пригодиться в будущем.']"| # Questions and answers from [psiholog.ru](https://www.psiholog.ru) site The data is current for 2023-12-16. The parser used to get the dataset can be found in [this repository](https://github.com/rogozinushka/psychologist_answers_parser) Data structure: - url - question url - question_name - question title - question_body - question to psychologist - answers - psychologist answers |url|question_name|question_body|answers| |---|---|---|---| |https://psiholog.ru/vopros/89|Как избавиться от страха и депрессии после цыганского гипноза?|спрашивает: Марина (Казань)Здравствуйте!Вчера подверглась цыганскому гипнозу..отдала им деньги,не понимаю как.произошло всё на работе(работаю продавцом не первый год).теперь остался только страх и опустошение..не знаю,как с этим справиться.ощущение,что схожу с ума и что все вокруг меня теперь считают немного сумашедшей.я как загнанная в клетку и до сих пор ощущаю присутствие этих цыганок(они работали вдвоём)|['Добрый вечер, Марина!<br>Ситация, произошедшая с Вами очень не приятная. И усугубляется тем, что Вы, видимо остались еще и должны? Если состояние еще актуально, то хорошо бы пройти антикризисную психотерапию.<br>Для информации: Цыганский гипноз хорошо описан как феномен у Милтона Эриксона (эрисонианский или эриксоновский гипноз) и осуществляется в рамках НЛП. И то, что с Вами произошло - это чистого вида манипуляция. Вам хорошо бы познать методы манипуляций, чтобы впредь чувствовать их и не поддаваться.<br>С позитивной точки зрения ситуацию лучше воспринять как урок. Кстати, эту технику (не всегда в чистом виде) используют в тренингах продаж и в профессиональном плане Вам эти знания могут пригодиться в будущем.']"|
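A minimal sketch of loading this Q&A dump and reading the documented columns. The dataset id `rogozinushka/psychologist_answers` is the repository id listed just below; the `train` split name and the use of the `datasets` library are assumptions, not something the card states.

```python
from datasets import load_dataset

# Dataset id taken from this repo's metadata; the split name is an assumption
# (a single default "train" split is typical for small scraped dumps).
ds = load_dataset("rogozinushka/psychologist_answers", split="train")

# Columns documented above: url, question_name, question_body, answers
row = ds[0]
print(row["url"])                 # link to the question page
print(row["question_name"])       # question title
print(row["question_body"][:200]) # full question text (truncated for display)
print(row["answers"])             # psychologist answers, as stored in the dataset
```

Since the card describes the data as a dataframe, `ds.to_pandas()` can be used afterwards to work with it in pandas.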
rogozinushka/psychologist_answers
[ "language:ru", "region:us" ]
2023-12-16T17:21:00+00:00
{"language": ["ru"]}
2023-12-16T17:26:04+00:00
[]
[ "ru" ]
TAGS #language-Russian #region-us
Вопросы к психологу и ответы от психологов с сайта URL ====================================================== Данные актуальны на 2023-12-16. Парсер, с помощью которого получили датасет, можно найти в этом репозитории Датафрейм имеет такую структуру: * url - ссылка на вопрос * question\_name - заголовок вопроса * question\_body - подробный вопрос * answers - ответы психологов Questions and answers from URL site =================================== The data is current for 2023-12-16. The parser used to get the dataset can be found in [this repository](URL) Data structure: * url - question url * question\_name - question title * question\_body - question to psychologist * answers - psychologist answers
[]
[ "TAGS\n#language-Russian #region-us \n" ]
[ 11 ]
[ "passage: TAGS\n#language-Russian #region-us \n" ]
945ec445c428c13f2c4fd5612df304b966cf4329
# Dataset Card for Evaluation run of Mihaiii/Metis-0.1 <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [Mihaiii/Metis-0.1](https://huggingface.co/Mihaiii/Metis-0.1) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_Mihaiii__Metis-0.1", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-12-16T17:19:36.124293](https://huggingface.co/datasets/open-llm-leaderboard/details_Mihaiii__Metis-0.1/blob/main/results_2023-12-16T17-19-36.124293.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.6116901101476386, "acc_stderr": 0.032847781505921096, "acc_norm": 0.6176101128662862, "acc_norm_stderr": 0.033521021839066995, "mc1": 0.30966952264381886, "mc1_stderr": 0.01618574435514491, "mc2": 0.4523612102104448, "mc2_stderr": 0.014929233225423213 }, "harness|arc:challenge|25": { "acc": 0.5665529010238908, "acc_stderr": 0.014481376224558902, "acc_norm": 0.6015358361774744, "acc_norm_stderr": 0.014306946052735567 }, "harness|hellaswag|10": { "acc": 0.6328420633339972, "acc_stderr": 0.004810449343572396, "acc_norm": 0.8285202150965943, "acc_norm_stderr": 0.003761573360614847 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.31, "acc_stderr": 0.046482319871173156, "acc_norm": 0.31, "acc_norm_stderr": 0.046482319871173156 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.6, "acc_stderr": 0.04232073695151589, "acc_norm": 0.6, "acc_norm_stderr": 0.04232073695151589 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.625, "acc_stderr": 0.039397364351956274, "acc_norm": 0.625, "acc_norm_stderr": 0.039397364351956274 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.59, "acc_stderr": 0.04943110704237102, "acc_norm": 0.59, "acc_norm_stderr": 0.04943110704237102 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.6641509433962264, "acc_stderr": 0.029067220146644826, "acc_norm": 0.6641509433962264, "acc_norm_stderr": 0.029067220146644826 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.7291666666666666, "acc_stderr": 0.03716177437566017, "acc_norm": 0.7291666666666666, "acc_norm_stderr": 0.03716177437566017 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.42, "acc_stderr": 0.049604496374885836, "acc_norm": 0.42, "acc_norm_stderr": 0.049604496374885836 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.54, "acc_stderr": 0.05009082659620332, "acc_norm": 0.54, "acc_norm_stderr": 0.05009082659620332 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.38, "acc_stderr": 0.048783173121456316, "acc_norm": 0.38, "acc_norm_stderr": 
0.048783173121456316 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.5838150289017341, "acc_stderr": 0.03758517775404947, "acc_norm": 0.5838150289017341, "acc_norm_stderr": 0.03758517775404947 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.37254901960784315, "acc_stderr": 0.04810840148082637, "acc_norm": 0.37254901960784315, "acc_norm_stderr": 0.04810840148082637 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.79, "acc_stderr": 0.04093601807403326, "acc_norm": 0.79, "acc_norm_stderr": 0.04093601807403326 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.5659574468085107, "acc_stderr": 0.03240038086792747, "acc_norm": 0.5659574468085107, "acc_norm_stderr": 0.03240038086792747 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.39473684210526316, "acc_stderr": 0.045981880578165414, "acc_norm": 0.39473684210526316, "acc_norm_stderr": 0.045981880578165414 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5862068965517241, "acc_stderr": 0.04104269211806232, "acc_norm": 0.5862068965517241, "acc_norm_stderr": 0.04104269211806232 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.41798941798941797, "acc_stderr": 0.025402555503260912, "acc_norm": 0.41798941798941797, "acc_norm_stderr": 0.025402555503260912 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.373015873015873, "acc_stderr": 0.04325506042017086, "acc_norm": 0.373015873015873, "acc_norm_stderr": 0.04325506042017086 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.36, "acc_stderr": 0.048241815132442176, "acc_norm": 0.36, "acc_norm_stderr": 0.048241815132442176 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.7580645161290323, "acc_stderr": 0.024362599693031093, "acc_norm": 0.7580645161290323, "acc_norm_stderr": 0.024362599693031093 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.5172413793103449, "acc_stderr": 0.035158955511656986, "acc_norm": 0.5172413793103449, "acc_norm_stderr": 0.035158955511656986 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.68, "acc_stderr": 0.04688261722621505, "acc_norm": 0.68, "acc_norm_stderr": 0.04688261722621505 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.7636363636363637, "acc_stderr": 0.03317505930009181, "acc_norm": 0.7636363636363637, "acc_norm_stderr": 0.03317505930009181 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.7777777777777778, "acc_stderr": 0.029620227874790482, "acc_norm": 0.7777777777777778, "acc_norm_stderr": 0.029620227874790482 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.8082901554404145, "acc_stderr": 0.028408953626245282, "acc_norm": 0.8082901554404145, "acc_norm_stderr": 0.028408953626245282 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.5692307692307692, "acc_stderr": 0.025106820660539757, "acc_norm": 0.5692307692307692, "acc_norm_stderr": 0.025106820660539757 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.31851851851851853, "acc_stderr": 0.02840653309060846, "acc_norm": 0.31851851851851853, "acc_norm_stderr": 0.02840653309060846 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.634453781512605, "acc_stderr": 0.031282177063684614, "acc_norm": 0.634453781512605, "acc_norm_stderr": 0.031282177063684614 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.31788079470198677, "acc_stderr": 0.038020397601079024, "acc_norm": 0.31788079470198677, "acc_norm_stderr": 0.038020397601079024 }, 
"harness|hendrycksTest-high_school_psychology|5": { "acc": 0.7871559633027523, "acc_stderr": 0.017549376389313694, "acc_norm": 0.7871559633027523, "acc_norm_stderr": 0.017549376389313694 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.4537037037037037, "acc_stderr": 0.03395322726375797, "acc_norm": 0.4537037037037037, "acc_norm_stderr": 0.03395322726375797 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.7941176470588235, "acc_stderr": 0.028379449451588667, "acc_norm": 0.7941176470588235, "acc_norm_stderr": 0.028379449451588667 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.7510548523206751, "acc_stderr": 0.028146970599422644, "acc_norm": 0.7510548523206751, "acc_norm_stderr": 0.028146970599422644 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.6860986547085202, "acc_stderr": 0.031146796482972465, "acc_norm": 0.6860986547085202, "acc_norm_stderr": 0.031146796482972465 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.7099236641221374, "acc_stderr": 0.03980066246467766, "acc_norm": 0.7099236641221374, "acc_norm_stderr": 0.03980066246467766 }, "harness|hendrycksTest-international_law|5": { "acc": 0.8016528925619835, "acc_stderr": 0.03640118271990947, "acc_norm": 0.8016528925619835, "acc_norm_stderr": 0.03640118271990947 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.7407407407407407, "acc_stderr": 0.04236511258094633, "acc_norm": 0.7407407407407407, "acc_norm_stderr": 0.04236511258094633 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.7484662576687117, "acc_stderr": 0.03408997886857529, "acc_norm": 0.7484662576687117, "acc_norm_stderr": 0.03408997886857529 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.42857142857142855, "acc_stderr": 0.04697113923010212, "acc_norm": 0.42857142857142855, "acc_norm_stderr": 0.04697113923010212 }, "harness|hendrycksTest-management|5": { "acc": 0.7475728155339806, "acc_stderr": 0.04301250399690878, "acc_norm": 0.7475728155339806, "acc_norm_stderr": 0.04301250399690878 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8632478632478633, "acc_stderr": 0.02250903393707779, "acc_norm": 0.8632478632478633, "acc_norm_stderr": 0.02250903393707779 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.68, "acc_stderr": 0.04688261722621505, "acc_norm": 0.68, "acc_norm_stderr": 0.04688261722621505 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.7982120051085568, "acc_stderr": 0.014351702181636863, "acc_norm": 0.7982120051085568, "acc_norm_stderr": 0.014351702181636863 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.6907514450867052, "acc_stderr": 0.02488314057007176, "acc_norm": 0.6907514450867052, "acc_norm_stderr": 0.02488314057007176 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.31731843575418994, "acc_stderr": 0.01556639263005703, "acc_norm": 0.31731843575418994, "acc_norm_stderr": 0.01556639263005703 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.6928104575163399, "acc_stderr": 0.02641560191438899, "acc_norm": 0.6928104575163399, "acc_norm_stderr": 0.02641560191438899 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.7009646302250804, "acc_stderr": 0.02600330111788514, "acc_norm": 0.7009646302250804, "acc_norm_stderr": 0.02600330111788514 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.6790123456790124, "acc_stderr": 0.025976566010862734, "acc_norm": 0.6790123456790124, "acc_norm_stderr": 0.025976566010862734 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.45390070921985815, "acc_stderr": 0.029700453247291463, 
"acc_norm": 0.45390070921985815, "acc_norm_stderr": 0.029700453247291463 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.4282920469361147, "acc_stderr": 0.01263822388031317, "acc_norm": 0.4282920469361147, "acc_norm_stderr": 0.01263822388031317 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.6323529411764706, "acc_stderr": 0.02928941340940319, "acc_norm": 0.6323529411764706, "acc_norm_stderr": 0.02928941340940319 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.6388888888888888, "acc_stderr": 0.01943177567703731, "acc_norm": 0.6388888888888888, "acc_norm_stderr": 0.01943177567703731 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6545454545454545, "acc_stderr": 0.04554619617541054, "acc_norm": 0.6545454545454545, "acc_norm_stderr": 0.04554619617541054 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.7020408163265306, "acc_stderr": 0.029279567411065674, "acc_norm": 0.7020408163265306, "acc_norm_stderr": 0.029279567411065674 }, "harness|hendrycksTest-sociology|5": { "acc": 0.8407960199004975, "acc_stderr": 0.025870646766169146, "acc_norm": 0.8407960199004975, "acc_norm_stderr": 0.025870646766169146 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.82, "acc_stderr": 0.038612291966536934, "acc_norm": 0.82, "acc_norm_stderr": 0.038612291966536934 }, "harness|hendrycksTest-virology|5": { "acc": 0.5120481927710844, "acc_stderr": 0.03891364495835817, "acc_norm": 0.5120481927710844, "acc_norm_stderr": 0.03891364495835817 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8070175438596491, "acc_stderr": 0.030267457554898458, "acc_norm": 0.8070175438596491, "acc_norm_stderr": 0.030267457554898458 }, "harness|truthfulqa:mc|0": { "mc1": 0.30966952264381886, "mc1_stderr": 0.01618574435514491, "mc2": 0.4523612102104448, "mc2_stderr": 0.014929233225423213 }, "harness|winogrande|5": { "acc": 0.7726913970007893, "acc_stderr": 0.011778612167091087 }, "harness|gsm8k|5": { "acc": 0.33206974981046244, "acc_stderr": 0.012972465034361867 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). 
--> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
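For convenience, here is a minimal sketch (not part of the original card) of how the configurations listed in this entry's metadata can be loaded with the `datasets` library. The repository id, config names, and the `latest` split below are taken directly from the configs declared further down in this entry; any other listed config can be loaded the same way.

```python
from datasets import load_dataset

# Aggregated per-task metrics for the most recent run
# (config "results", split "latest", as declared in the configs metadata of this entry).
results = load_dataset(
    "open-llm-leaderboard/details_Mihaiii__Metis-0.1",
    "results",
    split="latest",
)

# Per-sample details for a single task, e.g. the 5-shot GSM8K config.
gsm8k_details = load_dataset(
    "open-llm-leaderboard/details_Mihaiii__Metis-0.1",
    "harness_gsm8k_5",
    split="latest",
)
```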
open-llm-leaderboard/details_Mihaiii__Metis-0.1
[ "region:us" ]
2023-12-16T17:22:25+00:00
{"pretty_name": "Evaluation run of Mihaiii/Metis-0.1", "dataset_summary": "Dataset automatically created during the evaluation run of model [Mihaiii/Metis-0.1](https://huggingface.co/Mihaiii/Metis-0.1) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_Mihaiii__Metis-0.1\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-12-16T17:19:36.124293](https://huggingface.co/datasets/open-llm-leaderboard/details_Mihaiii__Metis-0.1/blob/main/results_2023-12-16T17-19-36.124293.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6116901101476386,\n \"acc_stderr\": 0.032847781505921096,\n \"acc_norm\": 0.6176101128662862,\n \"acc_norm_stderr\": 0.033521021839066995,\n \"mc1\": 0.30966952264381886,\n \"mc1_stderr\": 0.01618574435514491,\n \"mc2\": 0.4523612102104448,\n \"mc2_stderr\": 0.014929233225423213\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.5665529010238908,\n \"acc_stderr\": 0.014481376224558902,\n \"acc_norm\": 0.6015358361774744,\n \"acc_norm_stderr\": 0.014306946052735567\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6328420633339972,\n \"acc_stderr\": 0.004810449343572396,\n \"acc_norm\": 0.8285202150965943,\n \"acc_norm_stderr\": 0.003761573360614847\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.31,\n \"acc_stderr\": 0.046482319871173156,\n \"acc_norm\": 0.31,\n \"acc_norm_stderr\": 0.046482319871173156\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6,\n \"acc_stderr\": 0.04232073695151589,\n \"acc_norm\": 0.6,\n \"acc_norm_stderr\": 0.04232073695151589\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.625,\n \"acc_stderr\": 0.039397364351956274,\n \"acc_norm\": 0.625,\n \"acc_norm_stderr\": 0.039397364351956274\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.59,\n \"acc_stderr\": 0.04943110704237102,\n \"acc_norm\": 0.59,\n \"acc_norm_stderr\": 0.04943110704237102\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.6641509433962264,\n \"acc_stderr\": 0.029067220146644826,\n \"acc_norm\": 0.6641509433962264,\n \"acc_norm_stderr\": 0.029067220146644826\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7291666666666666,\n \"acc_stderr\": 0.03716177437566017,\n \"acc_norm\": 0.7291666666666666,\n \"acc_norm_stderr\": 0.03716177437566017\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.42,\n \"acc_stderr\": 0.049604496374885836,\n \"acc_norm\": 0.42,\n \"acc_norm_stderr\": 0.049604496374885836\n },\n 
\"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.54,\n \"acc_stderr\": 0.05009082659620332,\n \"acc_norm\": 0.54,\n \"acc_norm_stderr\": 0.05009082659620332\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.38,\n \"acc_stderr\": 0.048783173121456316,\n \"acc_norm\": 0.38,\n \"acc_norm_stderr\": 0.048783173121456316\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.5838150289017341,\n \"acc_stderr\": 0.03758517775404947,\n \"acc_norm\": 0.5838150289017341,\n \"acc_norm_stderr\": 0.03758517775404947\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.37254901960784315,\n \"acc_stderr\": 0.04810840148082637,\n \"acc_norm\": 0.37254901960784315,\n \"acc_norm_stderr\": 0.04810840148082637\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.79,\n \"acc_stderr\": 0.04093601807403326,\n \"acc_norm\": 0.79,\n \"acc_norm_stderr\": 0.04093601807403326\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.5659574468085107,\n \"acc_stderr\": 0.03240038086792747,\n \"acc_norm\": 0.5659574468085107,\n \"acc_norm_stderr\": 0.03240038086792747\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.39473684210526316,\n \"acc_stderr\": 0.045981880578165414,\n \"acc_norm\": 0.39473684210526316,\n \"acc_norm_stderr\": 0.045981880578165414\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5862068965517241,\n \"acc_stderr\": 0.04104269211806232,\n \"acc_norm\": 0.5862068965517241,\n \"acc_norm_stderr\": 0.04104269211806232\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.41798941798941797,\n \"acc_stderr\": 0.025402555503260912,\n \"acc_norm\": 0.41798941798941797,\n \"acc_norm_stderr\": 0.025402555503260912\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.373015873015873,\n \"acc_stderr\": 0.04325506042017086,\n \"acc_norm\": 0.373015873015873,\n \"acc_norm_stderr\": 0.04325506042017086\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.36,\n \"acc_stderr\": 0.048241815132442176,\n \"acc_norm\": 0.36,\n \"acc_norm_stderr\": 0.048241815132442176\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7580645161290323,\n \"acc_stderr\": 0.024362599693031093,\n \"acc_norm\": 0.7580645161290323,\n \"acc_norm_stderr\": 0.024362599693031093\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.5172413793103449,\n \"acc_stderr\": 0.035158955511656986,\n \"acc_norm\": 0.5172413793103449,\n \"acc_norm_stderr\": 0.035158955511656986\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.68,\n \"acc_stderr\": 0.04688261722621505,\n \"acc_norm\": 0.68,\n \"acc_norm_stderr\": 0.04688261722621505\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7636363636363637,\n \"acc_stderr\": 0.03317505930009181,\n \"acc_norm\": 0.7636363636363637,\n \"acc_norm_stderr\": 0.03317505930009181\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.7777777777777778,\n \"acc_stderr\": 0.029620227874790482,\n \"acc_norm\": 0.7777777777777778,\n \"acc_norm_stderr\": 0.029620227874790482\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.8082901554404145,\n \"acc_stderr\": 0.028408953626245282,\n \"acc_norm\": 0.8082901554404145,\n \"acc_norm_stderr\": 0.028408953626245282\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.5692307692307692,\n \"acc_stderr\": 0.025106820660539757,\n 
\"acc_norm\": 0.5692307692307692,\n \"acc_norm_stderr\": 0.025106820660539757\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.31851851851851853,\n \"acc_stderr\": 0.02840653309060846,\n \"acc_norm\": 0.31851851851851853,\n \"acc_norm_stderr\": 0.02840653309060846\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.634453781512605,\n \"acc_stderr\": 0.031282177063684614,\n \"acc_norm\": 0.634453781512605,\n \"acc_norm_stderr\": 0.031282177063684614\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.31788079470198677,\n \"acc_stderr\": 0.038020397601079024,\n \"acc_norm\": 0.31788079470198677,\n \"acc_norm_stderr\": 0.038020397601079024\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.7871559633027523,\n \"acc_stderr\": 0.017549376389313694,\n \"acc_norm\": 0.7871559633027523,\n \"acc_norm_stderr\": 0.017549376389313694\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.4537037037037037,\n \"acc_stderr\": 0.03395322726375797,\n \"acc_norm\": 0.4537037037037037,\n \"acc_norm_stderr\": 0.03395322726375797\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.7941176470588235,\n \"acc_stderr\": 0.028379449451588667,\n \"acc_norm\": 0.7941176470588235,\n \"acc_norm_stderr\": 0.028379449451588667\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.7510548523206751,\n \"acc_stderr\": 0.028146970599422644,\n \"acc_norm\": 0.7510548523206751,\n \"acc_norm_stderr\": 0.028146970599422644\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6860986547085202,\n \"acc_stderr\": 0.031146796482972465,\n \"acc_norm\": 0.6860986547085202,\n \"acc_norm_stderr\": 0.031146796482972465\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.7099236641221374,\n \"acc_stderr\": 0.03980066246467766,\n \"acc_norm\": 0.7099236641221374,\n \"acc_norm_stderr\": 0.03980066246467766\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.8016528925619835,\n \"acc_stderr\": 0.03640118271990947,\n \"acc_norm\": 0.8016528925619835,\n \"acc_norm_stderr\": 0.03640118271990947\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7407407407407407,\n \"acc_stderr\": 0.04236511258094633,\n \"acc_norm\": 0.7407407407407407,\n \"acc_norm_stderr\": 0.04236511258094633\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7484662576687117,\n \"acc_stderr\": 0.03408997886857529,\n \"acc_norm\": 0.7484662576687117,\n \"acc_norm_stderr\": 0.03408997886857529\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.42857142857142855,\n \"acc_stderr\": 0.04697113923010212,\n \"acc_norm\": 0.42857142857142855,\n \"acc_norm_stderr\": 0.04697113923010212\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7475728155339806,\n \"acc_stderr\": 0.04301250399690878,\n \"acc_norm\": 0.7475728155339806,\n \"acc_norm_stderr\": 0.04301250399690878\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8632478632478633,\n \"acc_stderr\": 0.02250903393707779,\n \"acc_norm\": 0.8632478632478633,\n \"acc_norm_stderr\": 0.02250903393707779\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.68,\n \"acc_stderr\": 0.04688261722621505,\n \"acc_norm\": 0.68,\n \"acc_norm_stderr\": 0.04688261722621505\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.7982120051085568,\n \"acc_stderr\": 0.014351702181636863,\n \"acc_norm\": 0.7982120051085568,\n \"acc_norm_stderr\": 
0.014351702181636863\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.6907514450867052,\n \"acc_stderr\": 0.02488314057007176,\n \"acc_norm\": 0.6907514450867052,\n \"acc_norm_stderr\": 0.02488314057007176\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.31731843575418994,\n \"acc_stderr\": 0.01556639263005703,\n \"acc_norm\": 0.31731843575418994,\n \"acc_norm_stderr\": 0.01556639263005703\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.6928104575163399,\n \"acc_stderr\": 0.02641560191438899,\n \"acc_norm\": 0.6928104575163399,\n \"acc_norm_stderr\": 0.02641560191438899\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.7009646302250804,\n \"acc_stderr\": 0.02600330111788514,\n \"acc_norm\": 0.7009646302250804,\n \"acc_norm_stderr\": 0.02600330111788514\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.6790123456790124,\n \"acc_stderr\": 0.025976566010862734,\n \"acc_norm\": 0.6790123456790124,\n \"acc_norm_stderr\": 0.025976566010862734\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.45390070921985815,\n \"acc_stderr\": 0.029700453247291463,\n \"acc_norm\": 0.45390070921985815,\n \"acc_norm_stderr\": 0.029700453247291463\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.4282920469361147,\n \"acc_stderr\": 0.01263822388031317,\n \"acc_norm\": 0.4282920469361147,\n \"acc_norm_stderr\": 0.01263822388031317\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.6323529411764706,\n \"acc_stderr\": 0.02928941340940319,\n \"acc_norm\": 0.6323529411764706,\n \"acc_norm_stderr\": 0.02928941340940319\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6388888888888888,\n \"acc_stderr\": 0.01943177567703731,\n \"acc_norm\": 0.6388888888888888,\n \"acc_norm_stderr\": 0.01943177567703731\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6545454545454545,\n \"acc_stderr\": 0.04554619617541054,\n \"acc_norm\": 0.6545454545454545,\n \"acc_norm_stderr\": 0.04554619617541054\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.7020408163265306,\n \"acc_stderr\": 0.029279567411065674,\n \"acc_norm\": 0.7020408163265306,\n \"acc_norm_stderr\": 0.029279567411065674\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8407960199004975,\n \"acc_stderr\": 0.025870646766169146,\n \"acc_norm\": 0.8407960199004975,\n \"acc_norm_stderr\": 0.025870646766169146\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.82,\n \"acc_stderr\": 0.038612291966536934,\n \"acc_norm\": 0.82,\n \"acc_norm_stderr\": 0.038612291966536934\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5120481927710844,\n \"acc_stderr\": 0.03891364495835817,\n \"acc_norm\": 0.5120481927710844,\n \"acc_norm_stderr\": 0.03891364495835817\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8070175438596491,\n \"acc_stderr\": 0.030267457554898458,\n \"acc_norm\": 0.8070175438596491,\n \"acc_norm_stderr\": 0.030267457554898458\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.30966952264381886,\n \"mc1_stderr\": 0.01618574435514491,\n \"mc2\": 0.4523612102104448,\n \"mc2_stderr\": 0.014929233225423213\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7726913970007893,\n \"acc_stderr\": 0.011778612167091087\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.33206974981046244,\n \"acc_stderr\": 0.012972465034361867\n }\n}\n```", "repo_url": "https://huggingface.co/Mihaiii/Metis-0.1", "leaderboard_url": 
"https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_12_16T17_19_36.124293", "path": ["**/details_harness|arc:challenge|25_2023-12-16T17-19-36.124293.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-12-16T17-19-36.124293.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_12_16T17_19_36.124293", "path": ["**/details_harness|gsm8k|5_2023-12-16T17-19-36.124293.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-12-16T17-19-36.124293.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_12_16T17_19_36.124293", "path": ["**/details_harness|hellaswag|10_2023-12-16T17-19-36.124293.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-12-16T17-19-36.124293.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_12_16T17_19_36.124293", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T17-19-36.124293.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-12-16T17-19-36.124293.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-12-16T17-19-36.124293.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T17-19-36.124293.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T17-19-36.124293.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-12-16T17-19-36.124293.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T17-19-36.124293.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T17-19-36.124293.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T17-19-36.124293.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T17-19-36.124293.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-12-16T17-19-36.124293.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-12-16T17-19-36.124293.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T17-19-36.124293.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-12-16T17-19-36.124293.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T17-19-36.124293.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T17-19-36.124293.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T17-19-36.124293.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-12-16T17-19-36.124293.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T17-19-36.124293.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T17-19-36.124293.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T17-19-36.124293.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T17-19-36.124293.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T17-19-36.124293.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T17-19-36.124293.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T17-19-36.124293.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T17-19-36.124293.parquet", 
"**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T17-19-36.124293.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T17-19-36.124293.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T17-19-36.124293.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T17-19-36.124293.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T17-19-36.124293.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T17-19-36.124293.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-12-16T17-19-36.124293.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T17-19-36.124293.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-12-16T17-19-36.124293.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T17-19-36.124293.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T17-19-36.124293.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T17-19-36.124293.parquet", "**/details_harness|hendrycksTest-management|5_2023-12-16T17-19-36.124293.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-12-16T17-19-36.124293.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T17-19-36.124293.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T17-19-36.124293.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T17-19-36.124293.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T17-19-36.124293.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-12-16T17-19-36.124293.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-12-16T17-19-36.124293.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-12-16T17-19-36.124293.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T17-19-36.124293.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-12-16T17-19-36.124293.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T17-19-36.124293.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T17-19-36.124293.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-12-16T17-19-36.124293.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-12-16T17-19-36.124293.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-12-16T17-19-36.124293.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T17-19-36.124293.parquet", "**/details_harness|hendrycksTest-virology|5_2023-12-16T17-19-36.124293.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-12-16T17-19-36.124293.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T17-19-36.124293.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-12-16T17-19-36.124293.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-12-16T17-19-36.124293.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T17-19-36.124293.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T17-19-36.124293.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-12-16T17-19-36.124293.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T17-19-36.124293.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T17-19-36.124293.parquet", 
"**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T17-19-36.124293.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T17-19-36.124293.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-12-16T17-19-36.124293.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-12-16T17-19-36.124293.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T17-19-36.124293.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-12-16T17-19-36.124293.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T17-19-36.124293.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T17-19-36.124293.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T17-19-36.124293.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-12-16T17-19-36.124293.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T17-19-36.124293.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T17-19-36.124293.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T17-19-36.124293.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T17-19-36.124293.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T17-19-36.124293.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T17-19-36.124293.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T17-19-36.124293.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T17-19-36.124293.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T17-19-36.124293.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T17-19-36.124293.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T17-19-36.124293.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T17-19-36.124293.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T17-19-36.124293.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T17-19-36.124293.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-12-16T17-19-36.124293.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T17-19-36.124293.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-12-16T17-19-36.124293.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T17-19-36.124293.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T17-19-36.124293.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T17-19-36.124293.parquet", "**/details_harness|hendrycksTest-management|5_2023-12-16T17-19-36.124293.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-12-16T17-19-36.124293.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T17-19-36.124293.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T17-19-36.124293.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T17-19-36.124293.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T17-19-36.124293.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-12-16T17-19-36.124293.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-12-16T17-19-36.124293.parquet", 
"**/details_harness|hendrycksTest-prehistory|5_2023-12-16T17-19-36.124293.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T17-19-36.124293.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-12-16T17-19-36.124293.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T17-19-36.124293.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T17-19-36.124293.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-12-16T17-19-36.124293.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-12-16T17-19-36.124293.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-12-16T17-19-36.124293.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T17-19-36.124293.parquet", "**/details_harness|hendrycksTest-virology|5_2023-12-16T17-19-36.124293.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-12-16T17-19-36.124293.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_12_16T17_19_36.124293", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T17-19-36.124293.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T17-19-36.124293.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_12_16T17_19_36.124293", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-12-16T17-19-36.124293.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-12-16T17-19-36.124293.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_12_16T17_19_36.124293", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-12-16T17-19-36.124293.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-12-16T17-19-36.124293.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_12_16T17_19_36.124293", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T17-19-36.124293.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T17-19-36.124293.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_12_16T17_19_36.124293", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T17-19-36.124293.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T17-19-36.124293.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_12_16T17_19_36.124293", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-12-16T17-19-36.124293.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-12-16T17-19-36.124293.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_12_16T17_19_36.124293", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T17-19-36.124293.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T17-19-36.124293.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_12_16T17_19_36.124293", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T17-19-36.124293.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T17-19-36.124293.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_12_16T17_19_36.124293", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T17-19-36.124293.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T17-19-36.124293.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_12_16T17_19_36.124293", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T17-19-36.124293.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T17-19-36.124293.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_12_16T17_19_36.124293", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-12-16T17-19-36.124293.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-12-16T17-19-36.124293.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_12_16T17_19_36.124293", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-12-16T17-19-36.124293.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-12-16T17-19-36.124293.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_12_16T17_19_36.124293", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T17-19-36.124293.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T17-19-36.124293.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_12_16T17_19_36.124293", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-12-16T17-19-36.124293.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-12-16T17-19-36.124293.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_12_16T17_19_36.124293", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T17-19-36.124293.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T17-19-36.124293.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_12_16T17_19_36.124293", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T17-19-36.124293.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T17-19-36.124293.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_12_16T17_19_36.124293", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T17-19-36.124293.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T17-19-36.124293.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_12_16T17_19_36.124293", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-12-16T17-19-36.124293.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-12-16T17-19-36.124293.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_12_16T17_19_36.124293", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T17-19-36.124293.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T17-19-36.124293.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_12_16T17_19_36.124293", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T17-19-36.124293.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T17-19-36.124293.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_12_16T17_19_36.124293", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T17-19-36.124293.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T17-19-36.124293.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_12_16T17_19_36.124293", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T17-19-36.124293.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T17-19-36.124293.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_12_16T17_19_36.124293", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T17-19-36.124293.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T17-19-36.124293.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_12_16T17_19_36.124293", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T17-19-36.124293.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T17-19-36.124293.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_12_16T17_19_36.124293", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T17-19-36.124293.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T17-19-36.124293.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_12_16T17_19_36.124293", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T17-19-36.124293.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T17-19-36.124293.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_12_16T17_19_36.124293", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T17-19-36.124293.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T17-19-36.124293.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_12_16T17_19_36.124293", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T17-19-36.124293.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T17-19-36.124293.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_12_16T17_19_36.124293", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T17-19-36.124293.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T17-19-36.124293.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_12_16T17_19_36.124293", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T17-19-36.124293.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T17-19-36.124293.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_12_16T17_19_36.124293", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T17-19-36.124293.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T17-19-36.124293.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_12_16T17_19_36.124293", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T17-19-36.124293.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T17-19-36.124293.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_12_16T17_19_36.124293", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-12-16T17-19-36.124293.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-12-16T17-19-36.124293.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_12_16T17_19_36.124293", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T17-19-36.124293.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T17-19-36.124293.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_12_16T17_19_36.124293", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-12-16T17-19-36.124293.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-12-16T17-19-36.124293.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_12_16T17_19_36.124293", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T17-19-36.124293.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T17-19-36.124293.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_12_16T17_19_36.124293", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T17-19-36.124293.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T17-19-36.124293.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_12_16T17_19_36.124293", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T17-19-36.124293.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T17-19-36.124293.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_12_16T17_19_36.124293", "path": ["**/details_harness|hendrycksTest-management|5_2023-12-16T17-19-36.124293.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-12-16T17-19-36.124293.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_12_16T17_19_36.124293", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-12-16T17-19-36.124293.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-12-16T17-19-36.124293.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_12_16T17_19_36.124293", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T17-19-36.124293.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T17-19-36.124293.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_12_16T17_19_36.124293", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T17-19-36.124293.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T17-19-36.124293.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_12_16T17_19_36.124293", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T17-19-36.124293.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T17-19-36.124293.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_12_16T17_19_36.124293", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T17-19-36.124293.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T17-19-36.124293.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_12_16T17_19_36.124293", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-12-16T17-19-36.124293.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-12-16T17-19-36.124293.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_12_16T17_19_36.124293", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-12-16T17-19-36.124293.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-12-16T17-19-36.124293.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_12_16T17_19_36.124293", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-12-16T17-19-36.124293.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-12-16T17-19-36.124293.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_12_16T17_19_36.124293", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T17-19-36.124293.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T17-19-36.124293.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_12_16T17_19_36.124293", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-12-16T17-19-36.124293.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2023-12-16T17-19-36.124293.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_12_16T17_19_36.124293", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T17-19-36.124293.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T17-19-36.124293.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_12_16T17_19_36.124293", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T17-19-36.124293.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T17-19-36.124293.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_12_16T17_19_36.124293", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-12-16T17-19-36.124293.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-12-16T17-19-36.124293.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_12_16T17_19_36.124293", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-12-16T17-19-36.124293.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-12-16T17-19-36.124293.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_12_16T17_19_36.124293", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-12-16T17-19-36.124293.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-12-16T17-19-36.124293.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_12_16T17_19_36.124293", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T17-19-36.124293.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T17-19-36.124293.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_12_16T17_19_36.124293", "path": ["**/details_harness|hendrycksTest-virology|5_2023-12-16T17-19-36.124293.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-12-16T17-19-36.124293.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_12_16T17_19_36.124293", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-12-16T17-19-36.124293.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-12-16T17-19-36.124293.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_12_16T17_19_36.124293", "path": ["**/details_harness|truthfulqa:mc|0_2023-12-16T17-19-36.124293.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-12-16T17-19-36.124293.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_12_16T17_19_36.124293", "path": ["**/details_harness|winogrande|5_2023-12-16T17-19-36.124293.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-12-16T17-19-36.124293.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_12_16T17_19_36.124293", "path": ["results_2023-12-16T17-19-36.124293.parquet"]}, {"split": "latest", "path": 
["results_2023-12-16T17-19-36.124293.parquet"]}]}]}
2023-12-16T17:23:07+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of Mihaiii/Metis-0.1 Dataset automatically created during the evaluation run of model Mihaiii/Metis-0.1 on the Open LLM Leaderboard. The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-12-16T17:19:36.124293(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
[ "# Dataset Card for Evaluation run of Mihaiii/Metis-0.1\n\n\n\nDataset automatically created during the evaluation run of model Mihaiii/Metis-0.1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-12-16T17:19:36.124293(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of Mihaiii/Metis-0.1\n\n\n\nDataset automatically created during the evaluation run of model Mihaiii/Metis-0.1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-12-16T17:19:36.124293(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 175, 67, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of Mihaiii/Metis-0.1\n\n\n\nDataset automatically created during the evaluation run of model Mihaiii/Metis-0.1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-12-16T17:19:36.124293(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]## Dataset Card Authors [optional]## Dataset Card Contact" ]
569eedf5ffcda5f0079fc01566745876000aa3f0
# Dataset Card for Evaluation run of KnutJaegersberg/Walter-SOLAR-11B <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [KnutJaegersberg/Walter-SOLAR-11B](https://huggingface.co/KnutJaegersberg/Walter-SOLAR-11B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_KnutJaegersberg__Walter-SOLAR-11B", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-12-16T17:23:07.067772](https://huggingface.co/datasets/open-llm-leaderboard/details_KnutJaegersberg__Walter-SOLAR-11B/blob/main/results_2023-12-16T17-23-07.067772.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.6404707310339822, "acc_stderr": 0.0318661190240921, "acc_norm": 0.6524926684204396, "acc_norm_stderr": 0.03268492668160191, "mc1": 0.29253365973072215, "mc1_stderr": 0.015925597445286165, "mc2": 0.4487960219023809, "mc2_stderr": 0.014224892990272523 }, "harness|arc:challenge|25": { "acc": 0.5639931740614335, "acc_stderr": 0.014491225699230918, "acc_norm": 0.6040955631399317, "acc_norm_stderr": 0.01429122839353659 }, "harness|hellaswag|10": { "acc": 0.6549492133041227, "acc_stderr": 0.004744132825391527, "acc_norm": 0.848635729934276, "acc_norm_stderr": 0.003576711065619589 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.6, "acc_stderr": 0.04232073695151589, "acc_norm": 0.6, "acc_norm_stderr": 0.04232073695151589 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.7368421052631579, "acc_stderr": 0.03583496176361074, "acc_norm": 0.7368421052631579, "acc_norm_stderr": 0.03583496176361074 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.66, "acc_stderr": 0.04760952285695237, "acc_norm": 0.66, "acc_norm_stderr": 0.04760952285695237 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.6754716981132075, "acc_stderr": 0.028815615713432108, "acc_norm": 0.6754716981132075, "acc_norm_stderr": 0.028815615713432108 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.7430555555555556, "acc_stderr": 0.03653946969442099, "acc_norm": 0.7430555555555556, "acc_norm_stderr": 0.03653946969442099 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.45, "acc_stderr": 0.05, "acc_norm": 0.45, "acc_norm_stderr": 0.05 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.54, "acc_stderr": 0.05009082659620332, "acc_norm": 0.54, "acc_norm_stderr": 0.05009082659620332 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.41, "acc_stderr":
0.049431107042371025, "acc_norm": 0.41, "acc_norm_stderr": 0.049431107042371025 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.6473988439306358, "acc_stderr": 0.03643037168958548, "acc_norm": 0.6473988439306358, "acc_norm_stderr": 0.03643037168958548 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.3333333333333333, "acc_stderr": 0.04690650298201943, "acc_norm": 0.3333333333333333, "acc_norm_stderr": 0.04690650298201943 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.72, "acc_stderr": 0.04512608598542128, "acc_norm": 0.72, "acc_norm_stderr": 0.04512608598542128 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.574468085106383, "acc_stderr": 0.03232146916224468, "acc_norm": 0.574468085106383, "acc_norm_stderr": 0.03232146916224468 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.43859649122807015, "acc_stderr": 0.04668000738510455, "acc_norm": 0.43859649122807015, "acc_norm_stderr": 0.04668000738510455 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.6068965517241379, "acc_stderr": 0.040703290137070705, "acc_norm": 0.6068965517241379, "acc_norm_stderr": 0.040703290137070705 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.4312169312169312, "acc_stderr": 0.0255064816981382, "acc_norm": 0.4312169312169312, "acc_norm_stderr": 0.0255064816981382 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.4126984126984127, "acc_stderr": 0.04403438954768176, "acc_norm": 0.4126984126984127, "acc_norm_stderr": 0.04403438954768176 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.35, "acc_stderr": 0.04793724854411019, "acc_norm": 0.35, "acc_norm_stderr": 0.04793724854411019 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.7838709677419354, "acc_stderr": 0.02341529343356852, "acc_norm": 0.7838709677419354, "acc_norm_stderr": 0.02341529343356852 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.5221674876847291, "acc_stderr": 0.03514528562175007, "acc_norm": 0.5221674876847291, "acc_norm_stderr": 0.03514528562175007 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.69, "acc_stderr": 0.04648231987117316, "acc_norm": 0.69, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.7818181818181819, "acc_stderr": 0.03225078108306289, "acc_norm": 0.7818181818181819, "acc_norm_stderr": 0.03225078108306289 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.8282828282828283, "acc_stderr": 0.02686971618742991, "acc_norm": 0.8282828282828283, "acc_norm_stderr": 0.02686971618742991 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.8704663212435233, "acc_stderr": 0.024233532297758723, "acc_norm": 0.8704663212435233, "acc_norm_stderr": 0.024233532297758723 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.6461538461538462, "acc_stderr": 0.02424378399406216, "acc_norm": 0.6461538461538462, "acc_norm_stderr": 0.02424378399406216 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.37037037037037035, "acc_stderr": 0.02944316932303154, "acc_norm": 0.37037037037037035, "acc_norm_stderr": 0.02944316932303154 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.6680672268907563, "acc_stderr": 0.03058869701378364, "acc_norm": 0.6680672268907563, "acc_norm_stderr": 0.03058869701378364 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.40397350993377484, "acc_stderr": 0.040064856853653415, "acc_norm": 0.40397350993377484, "acc_norm_stderr": 0.040064856853653415 }, 
"harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8458715596330275, "acc_stderr": 0.015480826865374296, "acc_norm": 0.8458715596330275, "acc_norm_stderr": 0.015480826865374296 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.5601851851851852, "acc_stderr": 0.033851779760448106, "acc_norm": 0.5601851851851852, "acc_norm_stderr": 0.033851779760448106 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.8480392156862745, "acc_stderr": 0.025195658428931792, "acc_norm": 0.8480392156862745, "acc_norm_stderr": 0.025195658428931792 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.8438818565400844, "acc_stderr": 0.023627159460318667, "acc_norm": 0.8438818565400844, "acc_norm_stderr": 0.023627159460318667 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.7130044843049327, "acc_stderr": 0.030360379710291954, "acc_norm": 0.7130044843049327, "acc_norm_stderr": 0.030360379710291954 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.7480916030534351, "acc_stderr": 0.03807387116306086, "acc_norm": 0.7480916030534351, "acc_norm_stderr": 0.03807387116306086 }, "harness|hendrycksTest-international_law|5": { "acc": 0.8016528925619835, "acc_stderr": 0.03640118271990945, "acc_norm": 0.8016528925619835, "acc_norm_stderr": 0.03640118271990945 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.8055555555555556, "acc_stderr": 0.038260763248848646, "acc_norm": 0.8055555555555556, "acc_norm_stderr": 0.038260763248848646 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.6993865030674846, "acc_stderr": 0.03602511318806771, "acc_norm": 0.6993865030674846, "acc_norm_stderr": 0.03602511318806771 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.4732142857142857, "acc_stderr": 0.047389751192741546, "acc_norm": 0.4732142857142857, "acc_norm_stderr": 0.047389751192741546 }, "harness|hendrycksTest-management|5": { "acc": 0.7961165048543689, "acc_stderr": 0.039891398595317706, "acc_norm": 0.7961165048543689, "acc_norm_stderr": 0.039891398595317706 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8846153846153846, "acc_stderr": 0.020930193185179333, "acc_norm": 0.8846153846153846, "acc_norm_stderr": 0.020930193185179333 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.8, "acc_stderr": 0.04020151261036845, "acc_norm": 0.8, "acc_norm_stderr": 0.04020151261036845 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.822477650063857, "acc_stderr": 0.01366423099583483, "acc_norm": 0.822477650063857, "acc_norm_stderr": 0.01366423099583483 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.7398843930635838, "acc_stderr": 0.023618678310069356, "acc_norm": 0.7398843930635838, "acc_norm_stderr": 0.023618678310069356 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.4145251396648045, "acc_stderr": 0.016476342210254, "acc_norm": 0.4145251396648045, "acc_norm_stderr": 0.016476342210254 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.7254901960784313, "acc_stderr": 0.025553169991826507, "acc_norm": 0.7254901960784313, "acc_norm_stderr": 0.025553169991826507 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.7234726688102894, "acc_stderr": 0.02540383297817962, "acc_norm": 0.7234726688102894, "acc_norm_stderr": 0.02540383297817962 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.75, "acc_stderr": 0.02409347123262133, "acc_norm": 0.75, "acc_norm_stderr": 0.02409347123262133 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.5106382978723404, "acc_stderr": 0.02982074719142244, "acc_norm": 0.5106382978723404, 
"acc_norm_stderr": 0.02982074719142244 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.4810951760104302, "acc_stderr": 0.012761104871472655, "acc_norm": 0.4810951760104302, "acc_norm_stderr": 0.012761104871472655 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.6801470588235294, "acc_stderr": 0.02833295951403121, "acc_norm": 0.6801470588235294, "acc_norm_stderr": 0.02833295951403121 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.6683006535947712, "acc_stderr": 0.01904748523936038, "acc_norm": 0.6683006535947712, "acc_norm_stderr": 0.01904748523936038 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.7090909090909091, "acc_stderr": 0.04350271442923243, "acc_norm": 0.7090909090909091, "acc_norm_stderr": 0.04350271442923243 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.7346938775510204, "acc_stderr": 0.02826388994378459, "acc_norm": 0.7346938775510204, "acc_norm_stderr": 0.02826388994378459 }, "harness|hendrycksTest-sociology|5": { "acc": 0.8507462686567164, "acc_stderr": 0.02519692987482707, "acc_norm": 0.8507462686567164, "acc_norm_stderr": 0.02519692987482707 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.86, "acc_stderr": 0.03487350880197768, "acc_norm": 0.86, "acc_norm_stderr": 0.03487350880197768 }, "harness|hendrycksTest-virology|5": { "acc": 0.536144578313253, "acc_stderr": 0.038823108508905954, "acc_norm": 0.536144578313253, "acc_norm_stderr": 0.038823108508905954 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8128654970760234, "acc_stderr": 0.029913127232368053, "acc_norm": 0.8128654970760234, "acc_norm_stderr": 0.029913127232368053 }, "harness|truthfulqa:mc|0": { "mc1": 0.29253365973072215, "mc1_stderr": 0.015925597445286165, "mc2": 0.4487960219023809, "mc2_stderr": 0.014224892990272523 }, "harness|winogrande|5": { "acc": 0.7955801104972375, "acc_stderr": 0.011334090612597202 }, "harness|gsm8k|5": { "acc": 0.009855951478392721, "acc_stderr": 0.0027210765770416586 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). 
--> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
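The loading example in the card above targets a single task configuration. As a complementary, illustrative sketch (not part of the original card), the aggregated metrics can be pulled from the "results" configuration, whose "latest" split is declared in this dataset's config list:

```python
from datasets import load_dataset

# Illustrative sketch: load the aggregated "results" configuration of this
# evaluation dataset. Per the card, this configuration stores the aggregated
# results of the run; the "latest" split points to the most recent run.
results = load_dataset(
    "open-llm-leaderboard/details_KnutJaegersberg__Walter-SOLAR-11B",
    "results",
    split="latest",
)

# Inspect the loaded split (one row per aggregated results record).
print(results)
```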
open-llm-leaderboard/details_KnutJaegersberg__Walter-SOLAR-11B
[ "region:us" ]
2023-12-16T17:26:00+00:00
{"pretty_name": "Evaluation run of KnutJaegersberg/Walter-SOLAR-11B", "dataset_summary": "Dataset automatically created during the evaluation run of model [KnutJaegersberg/Walter-SOLAR-11B](https://huggingface.co/KnutJaegersberg/Walter-SOLAR-11B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_KnutJaegersberg__Walter-SOLAR-11B\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-12-16T17:23:07.067772](https://huggingface.co/datasets/open-llm-leaderboard/details_KnutJaegersberg__Walter-SOLAR-11B/blob/main/results_2023-12-16T17-23-07.067772.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6404707310339822,\n \"acc_stderr\": 0.0318661190240921,\n \"acc_norm\": 0.6524926684204396,\n \"acc_norm_stderr\": 0.03268492668160191,\n \"mc1\": 0.29253365973072215,\n \"mc1_stderr\": 0.015925597445286165,\n \"mc2\": 0.4487960219023809,\n \"mc2_stderr\": 0.014224892990272523\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.5639931740614335,\n \"acc_stderr\": 0.014491225699230918,\n \"acc_norm\": 0.6040955631399317,\n \"acc_norm_stderr\": 0.01429122839353659\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6549492133041227,\n \"acc_stderr\": 0.004744132825391527,\n \"acc_norm\": 0.848635729934276,\n \"acc_norm_stderr\": 0.003576711065619589\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.31,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.31,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6,\n \"acc_stderr\": 0.04232073695151589,\n \"acc_norm\": 0.6,\n \"acc_norm_stderr\": 0.04232073695151589\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.7368421052631579,\n \"acc_stderr\": 0.03583496176361074,\n \"acc_norm\": 0.7368421052631579,\n \"acc_norm_stderr\": 0.03583496176361074\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.66,\n \"acc_stderr\": 0.04760952285695237,\n \"acc_norm\": 0.66,\n \"acc_norm_stderr\": 0.04760952285695237\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.6754716981132075,\n \"acc_stderr\": 0.028815615713432108,\n \"acc_norm\": 0.6754716981132075,\n \"acc_norm_stderr\": 0.028815615713432108\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7430555555555556,\n \"acc_stderr\": 0.03653946969442099,\n \"acc_norm\": 0.7430555555555556,\n \"acc_norm_stderr\": 0.03653946969442099\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.45,\n \"acc_stderr\": 0.05,\n \"acc_norm\": 0.45,\n 
\"acc_norm_stderr\": 0.05\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.54,\n \"acc_stderr\": 0.05009082659620332,\n \"acc_norm\": 0.54,\n \"acc_norm_stderr\": 0.05009082659620332\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.41,\n \"acc_stderr\": 0.049431107042371025,\n \"acc_norm\": 0.41,\n \"acc_norm_stderr\": 0.049431107042371025\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6473988439306358,\n \"acc_stderr\": 0.03643037168958548,\n \"acc_norm\": 0.6473988439306358,\n \"acc_norm_stderr\": 0.03643037168958548\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.3333333333333333,\n \"acc_stderr\": 0.04690650298201943,\n \"acc_norm\": 0.3333333333333333,\n \"acc_norm_stderr\": 0.04690650298201943\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.72,\n \"acc_stderr\": 0.04512608598542128,\n \"acc_norm\": 0.72,\n \"acc_norm_stderr\": 0.04512608598542128\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.574468085106383,\n \"acc_stderr\": 0.03232146916224468,\n \"acc_norm\": 0.574468085106383,\n \"acc_norm_stderr\": 0.03232146916224468\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.43859649122807015,\n \"acc_stderr\": 0.04668000738510455,\n \"acc_norm\": 0.43859649122807015,\n \"acc_norm_stderr\": 0.04668000738510455\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.6068965517241379,\n \"acc_stderr\": 0.040703290137070705,\n \"acc_norm\": 0.6068965517241379,\n \"acc_norm_stderr\": 0.040703290137070705\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.4312169312169312,\n \"acc_stderr\": 0.0255064816981382,\n \"acc_norm\": 0.4312169312169312,\n \"acc_norm_stderr\": 0.0255064816981382\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.4126984126984127,\n \"acc_stderr\": 0.04403438954768176,\n \"acc_norm\": 0.4126984126984127,\n \"acc_norm_stderr\": 0.04403438954768176\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.35,\n \"acc_stderr\": 0.04793724854411019,\n \"acc_norm\": 0.35,\n \"acc_norm_stderr\": 0.04793724854411019\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7838709677419354,\n \"acc_stderr\": 0.02341529343356852,\n \"acc_norm\": 0.7838709677419354,\n \"acc_norm_stderr\": 0.02341529343356852\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.5221674876847291,\n \"acc_stderr\": 0.03514528562175007,\n \"acc_norm\": 0.5221674876847291,\n \"acc_norm_stderr\": 0.03514528562175007\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.69,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.69,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7818181818181819,\n \"acc_stderr\": 0.03225078108306289,\n \"acc_norm\": 0.7818181818181819,\n \"acc_norm_stderr\": 0.03225078108306289\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.8282828282828283,\n \"acc_stderr\": 0.02686971618742991,\n \"acc_norm\": 0.8282828282828283,\n \"acc_norm_stderr\": 0.02686971618742991\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.8704663212435233,\n \"acc_stderr\": 0.024233532297758723,\n \"acc_norm\": 0.8704663212435233,\n \"acc_norm_stderr\": 0.024233532297758723\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.6461538461538462,\n \"acc_stderr\": 
0.02424378399406216,\n \"acc_norm\": 0.6461538461538462,\n \"acc_norm_stderr\": 0.02424378399406216\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.37037037037037035,\n \"acc_stderr\": 0.02944316932303154,\n \"acc_norm\": 0.37037037037037035,\n \"acc_norm_stderr\": 0.02944316932303154\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.6680672268907563,\n \"acc_stderr\": 0.03058869701378364,\n \"acc_norm\": 0.6680672268907563,\n \"acc_norm_stderr\": 0.03058869701378364\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.40397350993377484,\n \"acc_stderr\": 0.040064856853653415,\n \"acc_norm\": 0.40397350993377484,\n \"acc_norm_stderr\": 0.040064856853653415\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8458715596330275,\n \"acc_stderr\": 0.015480826865374296,\n \"acc_norm\": 0.8458715596330275,\n \"acc_norm_stderr\": 0.015480826865374296\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.5601851851851852,\n \"acc_stderr\": 0.033851779760448106,\n \"acc_norm\": 0.5601851851851852,\n \"acc_norm_stderr\": 0.033851779760448106\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.8480392156862745,\n \"acc_stderr\": 0.025195658428931792,\n \"acc_norm\": 0.8480392156862745,\n \"acc_norm_stderr\": 0.025195658428931792\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.8438818565400844,\n \"acc_stderr\": 0.023627159460318667,\n \"acc_norm\": 0.8438818565400844,\n \"acc_norm_stderr\": 0.023627159460318667\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.7130044843049327,\n \"acc_stderr\": 0.030360379710291954,\n \"acc_norm\": 0.7130044843049327,\n \"acc_norm_stderr\": 0.030360379710291954\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.7480916030534351,\n \"acc_stderr\": 0.03807387116306086,\n \"acc_norm\": 0.7480916030534351,\n \"acc_norm_stderr\": 0.03807387116306086\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.8016528925619835,\n \"acc_stderr\": 0.03640118271990945,\n \"acc_norm\": 0.8016528925619835,\n \"acc_norm_stderr\": 0.03640118271990945\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.8055555555555556,\n \"acc_stderr\": 0.038260763248848646,\n \"acc_norm\": 0.8055555555555556,\n \"acc_norm_stderr\": 0.038260763248848646\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.6993865030674846,\n \"acc_stderr\": 0.03602511318806771,\n \"acc_norm\": 0.6993865030674846,\n \"acc_norm_stderr\": 0.03602511318806771\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.4732142857142857,\n \"acc_stderr\": 0.047389751192741546,\n \"acc_norm\": 0.4732142857142857,\n \"acc_norm_stderr\": 0.047389751192741546\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7961165048543689,\n \"acc_stderr\": 0.039891398595317706,\n \"acc_norm\": 0.7961165048543689,\n \"acc_norm_stderr\": 0.039891398595317706\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8846153846153846,\n \"acc_stderr\": 0.020930193185179333,\n \"acc_norm\": 0.8846153846153846,\n \"acc_norm_stderr\": 0.020930193185179333\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.8,\n \"acc_stderr\": 0.04020151261036845,\n \"acc_norm\": 0.8,\n \"acc_norm_stderr\": 0.04020151261036845\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.822477650063857,\n \"acc_stderr\": 0.01366423099583483,\n \"acc_norm\": 0.822477650063857,\n 
\"acc_norm_stderr\": 0.01366423099583483\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.7398843930635838,\n \"acc_stderr\": 0.023618678310069356,\n \"acc_norm\": 0.7398843930635838,\n \"acc_norm_stderr\": 0.023618678310069356\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.4145251396648045,\n \"acc_stderr\": 0.016476342210254,\n \"acc_norm\": 0.4145251396648045,\n \"acc_norm_stderr\": 0.016476342210254\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.7254901960784313,\n \"acc_stderr\": 0.025553169991826507,\n \"acc_norm\": 0.7254901960784313,\n \"acc_norm_stderr\": 0.025553169991826507\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.7234726688102894,\n \"acc_stderr\": 0.02540383297817962,\n \"acc_norm\": 0.7234726688102894,\n \"acc_norm_stderr\": 0.02540383297817962\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.75,\n \"acc_stderr\": 0.02409347123262133,\n \"acc_norm\": 0.75,\n \"acc_norm_stderr\": 0.02409347123262133\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.5106382978723404,\n \"acc_stderr\": 0.02982074719142244,\n \"acc_norm\": 0.5106382978723404,\n \"acc_norm_stderr\": 0.02982074719142244\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.4810951760104302,\n \"acc_stderr\": 0.012761104871472655,\n \"acc_norm\": 0.4810951760104302,\n \"acc_norm_stderr\": 0.012761104871472655\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.6801470588235294,\n \"acc_stderr\": 0.02833295951403121,\n \"acc_norm\": 0.6801470588235294,\n \"acc_norm_stderr\": 0.02833295951403121\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6683006535947712,\n \"acc_stderr\": 0.01904748523936038,\n \"acc_norm\": 0.6683006535947712,\n \"acc_norm_stderr\": 0.01904748523936038\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.7090909090909091,\n \"acc_stderr\": 0.04350271442923243,\n \"acc_norm\": 0.7090909090909091,\n \"acc_norm_stderr\": 0.04350271442923243\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.7346938775510204,\n \"acc_stderr\": 0.02826388994378459,\n \"acc_norm\": 0.7346938775510204,\n \"acc_norm_stderr\": 0.02826388994378459\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8507462686567164,\n \"acc_stderr\": 0.02519692987482707,\n \"acc_norm\": 0.8507462686567164,\n \"acc_norm_stderr\": 0.02519692987482707\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.86,\n \"acc_stderr\": 0.03487350880197768,\n \"acc_norm\": 0.86,\n \"acc_norm_stderr\": 0.03487350880197768\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.536144578313253,\n \"acc_stderr\": 0.038823108508905954,\n \"acc_norm\": 0.536144578313253,\n \"acc_norm_stderr\": 0.038823108508905954\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8128654970760234,\n \"acc_stderr\": 0.029913127232368053,\n \"acc_norm\": 0.8128654970760234,\n \"acc_norm_stderr\": 0.029913127232368053\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.29253365973072215,\n \"mc1_stderr\": 0.015925597445286165,\n \"mc2\": 0.4487960219023809,\n \"mc2_stderr\": 0.014224892990272523\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7955801104972375,\n \"acc_stderr\": 0.011334090612597202\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.009855951478392721,\n \"acc_stderr\": 0.0027210765770416586\n }\n}\n```", "repo_url": "https://huggingface.co/KnutJaegersberg/Walter-SOLAR-11B", "leaderboard_url": 
"https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_12_16T17_23_07.067772", "path": ["**/details_harness|arc:challenge|25_2023-12-16T17-23-07.067772.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-12-16T17-23-07.067772.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_12_16T17_23_07.067772", "path": ["**/details_harness|gsm8k|5_2023-12-16T17-23-07.067772.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-12-16T17-23-07.067772.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_12_16T17_23_07.067772", "path": ["**/details_harness|hellaswag|10_2023-12-16T17-23-07.067772.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-12-16T17-23-07.067772.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_12_16T17_23_07.067772", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T17-23-07.067772.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-12-16T17-23-07.067772.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-12-16T17-23-07.067772.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T17-23-07.067772.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T17-23-07.067772.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-12-16T17-23-07.067772.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T17-23-07.067772.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T17-23-07.067772.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T17-23-07.067772.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T17-23-07.067772.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-12-16T17-23-07.067772.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-12-16T17-23-07.067772.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T17-23-07.067772.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-12-16T17-23-07.067772.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T17-23-07.067772.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T17-23-07.067772.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T17-23-07.067772.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-12-16T17-23-07.067772.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T17-23-07.067772.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T17-23-07.067772.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T17-23-07.067772.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T17-23-07.067772.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T17-23-07.067772.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T17-23-07.067772.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T17-23-07.067772.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T17-23-07.067772.parquet", 
"**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T17-23-07.067772.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T17-23-07.067772.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T17-23-07.067772.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T17-23-07.067772.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T17-23-07.067772.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T17-23-07.067772.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-12-16T17-23-07.067772.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T17-23-07.067772.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-12-16T17-23-07.067772.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T17-23-07.067772.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T17-23-07.067772.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T17-23-07.067772.parquet", "**/details_harness|hendrycksTest-management|5_2023-12-16T17-23-07.067772.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-12-16T17-23-07.067772.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T17-23-07.067772.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T17-23-07.067772.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T17-23-07.067772.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T17-23-07.067772.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-12-16T17-23-07.067772.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-12-16T17-23-07.067772.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-12-16T17-23-07.067772.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T17-23-07.067772.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-12-16T17-23-07.067772.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T17-23-07.067772.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T17-23-07.067772.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-12-16T17-23-07.067772.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-12-16T17-23-07.067772.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-12-16T17-23-07.067772.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T17-23-07.067772.parquet", "**/details_harness|hendrycksTest-virology|5_2023-12-16T17-23-07.067772.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-12-16T17-23-07.067772.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T17-23-07.067772.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-12-16T17-23-07.067772.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-12-16T17-23-07.067772.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T17-23-07.067772.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T17-23-07.067772.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-12-16T17-23-07.067772.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T17-23-07.067772.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T17-23-07.067772.parquet", 
"**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T17-23-07.067772.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T17-23-07.067772.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-12-16T17-23-07.067772.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-12-16T17-23-07.067772.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T17-23-07.067772.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-12-16T17-23-07.067772.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T17-23-07.067772.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T17-23-07.067772.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T17-23-07.067772.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-12-16T17-23-07.067772.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T17-23-07.067772.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T17-23-07.067772.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T17-23-07.067772.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T17-23-07.067772.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T17-23-07.067772.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T17-23-07.067772.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T17-23-07.067772.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T17-23-07.067772.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T17-23-07.067772.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T17-23-07.067772.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T17-23-07.067772.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T17-23-07.067772.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T17-23-07.067772.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T17-23-07.067772.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-12-16T17-23-07.067772.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T17-23-07.067772.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-12-16T17-23-07.067772.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T17-23-07.067772.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T17-23-07.067772.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T17-23-07.067772.parquet", "**/details_harness|hendrycksTest-management|5_2023-12-16T17-23-07.067772.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-12-16T17-23-07.067772.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T17-23-07.067772.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T17-23-07.067772.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T17-23-07.067772.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T17-23-07.067772.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-12-16T17-23-07.067772.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-12-16T17-23-07.067772.parquet", 
"**/details_harness|hendrycksTest-prehistory|5_2023-12-16T17-23-07.067772.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T17-23-07.067772.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-12-16T17-23-07.067772.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T17-23-07.067772.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T17-23-07.067772.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-12-16T17-23-07.067772.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-12-16T17-23-07.067772.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-12-16T17-23-07.067772.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T17-23-07.067772.parquet", "**/details_harness|hendrycksTest-virology|5_2023-12-16T17-23-07.067772.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-12-16T17-23-07.067772.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_12_16T17_23_07.067772", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T17-23-07.067772.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T17-23-07.067772.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_12_16T17_23_07.067772", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-12-16T17-23-07.067772.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-12-16T17-23-07.067772.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_12_16T17_23_07.067772", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-12-16T17-23-07.067772.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-12-16T17-23-07.067772.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_12_16T17_23_07.067772", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T17-23-07.067772.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T17-23-07.067772.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_12_16T17_23_07.067772", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T17-23-07.067772.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T17-23-07.067772.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_12_16T17_23_07.067772", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-12-16T17-23-07.067772.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-12-16T17-23-07.067772.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_12_16T17_23_07.067772", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T17-23-07.067772.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T17-23-07.067772.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_12_16T17_23_07.067772", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T17-23-07.067772.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T17-23-07.067772.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_12_16T17_23_07.067772", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T17-23-07.067772.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T17-23-07.067772.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_12_16T17_23_07.067772", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T17-23-07.067772.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T17-23-07.067772.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_12_16T17_23_07.067772", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-12-16T17-23-07.067772.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-12-16T17-23-07.067772.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_12_16T17_23_07.067772", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-12-16T17-23-07.067772.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-12-16T17-23-07.067772.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_12_16T17_23_07.067772", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T17-23-07.067772.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T17-23-07.067772.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_12_16T17_23_07.067772", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-12-16T17-23-07.067772.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-12-16T17-23-07.067772.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_12_16T17_23_07.067772", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T17-23-07.067772.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T17-23-07.067772.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_12_16T17_23_07.067772", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T17-23-07.067772.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T17-23-07.067772.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_12_16T17_23_07.067772", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T17-23-07.067772.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T17-23-07.067772.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_12_16T17_23_07.067772", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-12-16T17-23-07.067772.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-12-16T17-23-07.067772.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_12_16T17_23_07.067772", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T17-23-07.067772.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T17-23-07.067772.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_12_16T17_23_07.067772", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T17-23-07.067772.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T17-23-07.067772.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_12_16T17_23_07.067772", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T17-23-07.067772.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T17-23-07.067772.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_12_16T17_23_07.067772", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T17-23-07.067772.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T17-23-07.067772.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_12_16T17_23_07.067772", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T17-23-07.067772.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T17-23-07.067772.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_12_16T17_23_07.067772", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T17-23-07.067772.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T17-23-07.067772.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_12_16T17_23_07.067772", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T17-23-07.067772.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T17-23-07.067772.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_12_16T17_23_07.067772", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T17-23-07.067772.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T17-23-07.067772.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_12_16T17_23_07.067772", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T17-23-07.067772.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T17-23-07.067772.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_12_16T17_23_07.067772", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T17-23-07.067772.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T17-23-07.067772.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_12_16T17_23_07.067772", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T17-23-07.067772.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T17-23-07.067772.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_12_16T17_23_07.067772", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T17-23-07.067772.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T17-23-07.067772.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_12_16T17_23_07.067772", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T17-23-07.067772.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T17-23-07.067772.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_12_16T17_23_07.067772", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T17-23-07.067772.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T17-23-07.067772.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_12_16T17_23_07.067772", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-12-16T17-23-07.067772.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-12-16T17-23-07.067772.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_12_16T17_23_07.067772", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T17-23-07.067772.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T17-23-07.067772.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_12_16T17_23_07.067772", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-12-16T17-23-07.067772.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-12-16T17-23-07.067772.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_12_16T17_23_07.067772", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T17-23-07.067772.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T17-23-07.067772.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_12_16T17_23_07.067772", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T17-23-07.067772.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T17-23-07.067772.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_12_16T17_23_07.067772", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T17-23-07.067772.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T17-23-07.067772.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_12_16T17_23_07.067772", "path": ["**/details_harness|hendrycksTest-management|5_2023-12-16T17-23-07.067772.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-12-16T17-23-07.067772.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_12_16T17_23_07.067772", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-12-16T17-23-07.067772.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-12-16T17-23-07.067772.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_12_16T17_23_07.067772", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T17-23-07.067772.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T17-23-07.067772.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_12_16T17_23_07.067772", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T17-23-07.067772.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T17-23-07.067772.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_12_16T17_23_07.067772", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T17-23-07.067772.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T17-23-07.067772.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_12_16T17_23_07.067772", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T17-23-07.067772.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T17-23-07.067772.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_12_16T17_23_07.067772", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-12-16T17-23-07.067772.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-12-16T17-23-07.067772.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_12_16T17_23_07.067772", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-12-16T17-23-07.067772.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-12-16T17-23-07.067772.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_12_16T17_23_07.067772", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-12-16T17-23-07.067772.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-12-16T17-23-07.067772.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_12_16T17_23_07.067772", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T17-23-07.067772.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T17-23-07.067772.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_12_16T17_23_07.067772", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-12-16T17-23-07.067772.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2023-12-16T17-23-07.067772.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_12_16T17_23_07.067772", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T17-23-07.067772.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T17-23-07.067772.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_12_16T17_23_07.067772", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T17-23-07.067772.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T17-23-07.067772.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_12_16T17_23_07.067772", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-12-16T17-23-07.067772.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-12-16T17-23-07.067772.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_12_16T17_23_07.067772", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-12-16T17-23-07.067772.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-12-16T17-23-07.067772.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_12_16T17_23_07.067772", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-12-16T17-23-07.067772.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-12-16T17-23-07.067772.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_12_16T17_23_07.067772", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T17-23-07.067772.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T17-23-07.067772.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_12_16T17_23_07.067772", "path": ["**/details_harness|hendrycksTest-virology|5_2023-12-16T17-23-07.067772.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-12-16T17-23-07.067772.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_12_16T17_23_07.067772", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-12-16T17-23-07.067772.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-12-16T17-23-07.067772.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_12_16T17_23_07.067772", "path": ["**/details_harness|truthfulqa:mc|0_2023-12-16T17-23-07.067772.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-12-16T17-23-07.067772.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_12_16T17_23_07.067772", "path": ["**/details_harness|winogrande|5_2023-12-16T17-23-07.067772.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-12-16T17-23-07.067772.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_12_16T17_23_07.067772", "path": ["results_2023-12-16T17-23-07.067772.parquet"]}, {"split": "latest", "path": 
["results_2023-12-16T17-23-07.067772.parquet"]}]}]}
2023-12-16T17:26:40+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of KnutJaegersberg/Walter-SOLAR-11B Dataset automatically created during the evaluation run of model KnutJaegersberg/Walter-SOLAR-11B on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-12-16T17:23:07.067772 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
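The card text above says "To load the details from a run, you can for instance do the following:" but the accompanying snippet was stripped during text extraction. A minimal sketch of that load is given below, assuming the repository id follows the `details_<org>__<model>` naming pattern used by the other records in this dump; `harness_winogrande_5` is just one of the 63 configurations listed for the run.

```python
# Minimal sketch: load one task's detail split for the Walter-SOLAR-11B run.
# The repo id below is an assumption inferred from the details_<org>__<model>
# pattern seen in the other records of this dump.
from datasets import load_dataset

data = load_dataset(
    "open-llm-leaderboard/details_KnutJaegersberg__Walter-SOLAR-11B",  # assumed repo id
    "harness_winogrande_5",   # any of the 63 listed configs works here
    split="train",            # "train" always points at the latest results
)
print(data)
```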
[ "# Dataset Card for Evaluation run of KnutJaegersberg/Walter-SOLAR-11B\n\n\n\nDataset automatically created during the evaluation run of model KnutJaegersberg/Walter-SOLAR-11B on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-12-16T17:23:07.067772(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of KnutJaegersberg/Walter-SOLAR-11B\n\n\n\nDataset automatically created during the evaluation run of model KnutJaegersberg/Walter-SOLAR-11B on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-12-16T17:23:07.067772(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 187, 67, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of KnutJaegersberg/Walter-SOLAR-11B\n\n\n\nDataset automatically created during the evaluation run of model KnutJaegersberg/Walter-SOLAR-11B on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-12-16T17:23:07.067772(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]## Dataset Card Authors [optional]## Dataset Card Contact" ]
80ad31a42c9540c146c92a29866d3e28bb2c8375
# Dataset Card for Evaluation run of deepnight-research/lil-c3po <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [deepnight-research/lil-c3po](https://huggingface.co/deepnight-research/lil-c3po) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_deepnight-research__lil-c3po", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-12-16T17:28:57.885828](https://huggingface.co/datasets/open-llm-leaderboard/details_deepnight-research__lil-c3po/blob/main/results_2023-12-16T17-28-57.885828.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.6248592823720264, "acc_stderr": 0.032934207150823985, "acc_norm": 0.627774280407218, "acc_norm_stderr": 0.03360219710155188, "mc1": 0.5238678090575275, "mc1_stderr": 0.017483547156961567, "mc2": 0.6873119394140667, "mc2_stderr": 0.0149863398321527 }, "harness|arc:challenge|25": { "acc": 0.6262798634812287, "acc_stderr": 0.014137708601759091, "acc_norm": 0.6501706484641638, "acc_norm_stderr": 0.01393680921215829 }, "harness|hellaswag|10": { "acc": 0.6699860585540729, "acc_stderr": 0.004692567655961763, "acc_norm": 0.8444532961561442, "acc_norm_stderr": 0.0036168436913607627 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.32, "acc_stderr": 0.04688261722621502, "acc_norm": 0.32, "acc_norm_stderr": 0.04688261722621502 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.6, "acc_stderr": 0.042320736951515885, "acc_norm": 0.6, "acc_norm_stderr": 0.042320736951515885 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.6513157894736842, "acc_stderr": 0.0387813988879761, "acc_norm": 0.6513157894736842, "acc_norm_stderr": 0.0387813988879761 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.59, "acc_stderr": 0.049431107042371025, "acc_norm": 0.59, "acc_norm_stderr": 0.049431107042371025 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.7018867924528301, "acc_stderr": 0.028152837942493864, "acc_norm": 0.7018867924528301, "acc_norm_stderr": 0.028152837942493864 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.7013888888888888, "acc_stderr": 0.03827052357950756, "acc_norm": 0.7013888888888888, "acc_norm_stderr": 0.03827052357950756 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.42, "acc_stderr": 0.049604496374885836, "acc_norm": 0.42, "acc_norm_stderr": 0.049604496374885836 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.56, "acc_stderr": 0.049888765156985884, "acc_norm": 0.56, "acc_norm_stderr": 0.049888765156985884 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.34, 
"acc_stderr": 0.04760952285695235, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695235 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.6184971098265896, "acc_stderr": 0.03703851193099521, "acc_norm": 0.6184971098265896, "acc_norm_stderr": 0.03703851193099521 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.4117647058823529, "acc_stderr": 0.04897104952726367, "acc_norm": 0.4117647058823529, "acc_norm_stderr": 0.04897104952726367 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.73, "acc_stderr": 0.04461960433384739, "acc_norm": 0.73, "acc_norm_stderr": 0.04461960433384739 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.5617021276595745, "acc_stderr": 0.03243618636108101, "acc_norm": 0.5617021276595745, "acc_norm_stderr": 0.03243618636108101 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.4473684210526316, "acc_stderr": 0.04677473004491199, "acc_norm": 0.4473684210526316, "acc_norm_stderr": 0.04677473004491199 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.593103448275862, "acc_stderr": 0.04093793981266236, "acc_norm": 0.593103448275862, "acc_norm_stderr": 0.04093793981266236 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.37566137566137564, "acc_stderr": 0.02494236893115979, "acc_norm": 0.37566137566137564, "acc_norm_stderr": 0.02494236893115979 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.4523809523809524, "acc_stderr": 0.044518079590553275, "acc_norm": 0.4523809523809524, "acc_norm_stderr": 0.044518079590553275 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.37, "acc_stderr": 0.04852365870939099, "acc_norm": 0.37, "acc_norm_stderr": 0.04852365870939099 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.7290322580645161, "acc_stderr": 0.025284416114900156, "acc_norm": 0.7290322580645161, "acc_norm_stderr": 0.025284416114900156 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.5172413793103449, "acc_stderr": 0.03515895551165698, "acc_norm": 0.5172413793103449, "acc_norm_stderr": 0.03515895551165698 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.69, "acc_stderr": 0.04648231987117316, "acc_norm": 0.69, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.7575757575757576, "acc_stderr": 0.03346409881055953, "acc_norm": 0.7575757575757576, "acc_norm_stderr": 0.03346409881055953 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.803030303030303, "acc_stderr": 0.028335609732463362, "acc_norm": 0.803030303030303, "acc_norm_stderr": 0.028335609732463362 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.8497409326424871, "acc_stderr": 0.02578772318072388, "acc_norm": 0.8497409326424871, "acc_norm_stderr": 0.02578772318072388 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.6256410256410256, "acc_stderr": 0.024537591572830506, "acc_norm": 0.6256410256410256, "acc_norm_stderr": 0.024537591572830506 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.362962962962963, "acc_stderr": 0.02931820364520686, "acc_norm": 0.362962962962963, "acc_norm_stderr": 0.02931820364520686 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.6554621848739496, "acc_stderr": 0.030868682604121622, "acc_norm": 0.6554621848739496, "acc_norm_stderr": 0.030868682604121622 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.37748344370860926, "acc_stderr": 0.03958027231121569, "acc_norm": 0.37748344370860926, "acc_norm_stderr": 
0.03958027231121569 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8110091743119267, "acc_stderr": 0.016785481159203624, "acc_norm": 0.8110091743119267, "acc_norm_stderr": 0.016785481159203624 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.5324074074074074, "acc_stderr": 0.03402801581358966, "acc_norm": 0.5324074074074074, "acc_norm_stderr": 0.03402801581358966 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.7647058823529411, "acc_stderr": 0.029771775228145628, "acc_norm": 0.7647058823529411, "acc_norm_stderr": 0.029771775228145628 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.7468354430379747, "acc_stderr": 0.028304657943035286, "acc_norm": 0.7468354430379747, "acc_norm_stderr": 0.028304657943035286 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.6860986547085202, "acc_stderr": 0.031146796482972465, "acc_norm": 0.6860986547085202, "acc_norm_stderr": 0.031146796482972465 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.7175572519083969, "acc_stderr": 0.03948406125768361, "acc_norm": 0.7175572519083969, "acc_norm_stderr": 0.03948406125768361 }, "harness|hendrycksTest-international_law|5": { "acc": 0.8016528925619835, "acc_stderr": 0.036401182719909456, "acc_norm": 0.8016528925619835, "acc_norm_stderr": 0.036401182719909456 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.6944444444444444, "acc_stderr": 0.04453197507374984, "acc_norm": 0.6944444444444444, "acc_norm_stderr": 0.04453197507374984 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.7484662576687117, "acc_stderr": 0.03408997886857529, "acc_norm": 0.7484662576687117, "acc_norm_stderr": 0.03408997886857529 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.44642857142857145, "acc_stderr": 0.047184714852195886, "acc_norm": 0.44642857142857145, "acc_norm_stderr": 0.047184714852195886 }, "harness|hendrycksTest-management|5": { "acc": 0.7475728155339806, "acc_stderr": 0.04301250399690878, "acc_norm": 0.7475728155339806, "acc_norm_stderr": 0.04301250399690878 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8632478632478633, "acc_stderr": 0.022509033937077802, "acc_norm": 0.8632478632478633, "acc_norm_stderr": 0.022509033937077802 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.68, "acc_stderr": 0.04688261722621504, "acc_norm": 0.68, "acc_norm_stderr": 0.04688261722621504 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.7943805874840357, "acc_stderr": 0.01445250045678583, "acc_norm": 0.7943805874840357, "acc_norm_stderr": 0.01445250045678583 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.6705202312138728, "acc_stderr": 0.025305258131879702, "acc_norm": 0.6705202312138728, "acc_norm_stderr": 0.025305258131879702 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.48268156424581005, "acc_stderr": 0.016712467441702517, "acc_norm": 0.48268156424581005, "acc_norm_stderr": 0.016712467441702517 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.6928104575163399, "acc_stderr": 0.02641560191438899, "acc_norm": 0.6928104575163399, "acc_norm_stderr": 0.02641560191438899 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.6688102893890675, "acc_stderr": 0.02673062072800491, "acc_norm": 0.6688102893890675, "acc_norm_stderr": 0.02673062072800491 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.6759259259259259, "acc_stderr": 0.02604176620271716, "acc_norm": 0.6759259259259259, "acc_norm_stderr": 0.02604176620271716 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.46099290780141844, "acc_stderr": 
0.029736592526424438, "acc_norm": 0.46099290780141844, "acc_norm_stderr": 0.029736592526424438 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.4406779661016949, "acc_stderr": 0.012680037994097074, "acc_norm": 0.4406779661016949, "acc_norm_stderr": 0.012680037994097074 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.6323529411764706, "acc_stderr": 0.02928941340940319, "acc_norm": 0.6323529411764706, "acc_norm_stderr": 0.02928941340940319 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.6225490196078431, "acc_stderr": 0.01961085147488029, "acc_norm": 0.6225490196078431, "acc_norm_stderr": 0.01961085147488029 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6818181818181818, "acc_stderr": 0.04461272175910509, "acc_norm": 0.6818181818181818, "acc_norm_stderr": 0.04461272175910509 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.7061224489795919, "acc_stderr": 0.02916273841024977, "acc_norm": 0.7061224489795919, "acc_norm_stderr": 0.02916273841024977 }, "harness|hendrycksTest-sociology|5": { "acc": 0.7910447761194029, "acc_stderr": 0.028748298931728655, "acc_norm": 0.7910447761194029, "acc_norm_stderr": 0.028748298931728655 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.84, "acc_stderr": 0.03684529491774708, "acc_norm": 0.84, "acc_norm_stderr": 0.03684529491774708 }, "harness|hendrycksTest-virology|5": { "acc": 0.4759036144578313, "acc_stderr": 0.03887971849597264, "acc_norm": 0.4759036144578313, "acc_norm_stderr": 0.03887971849597264 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8538011695906432, "acc_stderr": 0.027097290118070806, "acc_norm": 0.8538011695906432, "acc_norm_stderr": 0.027097290118070806 }, "harness|truthfulqa:mc|0": { "mc1": 0.5238678090575275, "mc1_stderr": 0.017483547156961567, "mc2": 0.6873119394140667, "mc2_stderr": 0.0149863398321527 }, "harness|winogrande|5": { "acc": 0.7916337805840569, "acc_stderr": 0.011414554399987745 }, "harness|gsm8k|5": { "acc": 0.4844579226686884, "acc_stderr": 0.013765829454512893 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). 
--> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
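As a follow-up to the loading snippet in the card above, a minimal sketch of pulling the aggregated scores instead of a single task: the `results` configuration and the `latest` split used here are both listed in this record's config table, and `latest`, like `train`, points at the newest timestamped run.

```python
# Minimal sketch: load the aggregated "results" config for the lil-c3po run;
# the "latest" split is an alias of the newest timestamped split.
from datasets import load_dataset

results = load_dataset(
    "open-llm-leaderboard/details_deepnight-research__lil-c3po",
    "results",        # aggregated metrics config
    split="latest",
)
print(results[0])  # first (and only) row of the aggregated results table
```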
open-llm-leaderboard/details_deepnight-research__lil-c3po
[ "region:us" ]
2023-12-16T17:31:46+00:00
{"pretty_name": "Evaluation run of deepnight-research/lil-c3po", "dataset_summary": "Dataset automatically created during the evaluation run of model [deepnight-research/lil-c3po](https://huggingface.co/deepnight-research/lil-c3po) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_deepnight-research__lil-c3po\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-12-16T17:28:57.885828](https://huggingface.co/datasets/open-llm-leaderboard/details_deepnight-research__lil-c3po/blob/main/results_2023-12-16T17-28-57.885828.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6248592823720264,\n \"acc_stderr\": 0.032934207150823985,\n \"acc_norm\": 0.627774280407218,\n \"acc_norm_stderr\": 0.03360219710155188,\n \"mc1\": 0.5238678090575275,\n \"mc1_stderr\": 0.017483547156961567,\n \"mc2\": 0.6873119394140667,\n \"mc2_stderr\": 0.0149863398321527\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.6262798634812287,\n \"acc_stderr\": 0.014137708601759091,\n \"acc_norm\": 0.6501706484641638,\n \"acc_norm_stderr\": 0.01393680921215829\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6699860585540729,\n \"acc_stderr\": 0.004692567655961763,\n \"acc_norm\": 0.8444532961561442,\n \"acc_norm_stderr\": 0.0036168436913607627\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.32,\n \"acc_stderr\": 0.04688261722621502,\n \"acc_norm\": 0.32,\n \"acc_norm_stderr\": 0.04688261722621502\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6,\n \"acc_stderr\": 0.042320736951515885,\n \"acc_norm\": 0.6,\n \"acc_norm_stderr\": 0.042320736951515885\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.6513157894736842,\n \"acc_stderr\": 0.0387813988879761,\n \"acc_norm\": 0.6513157894736842,\n \"acc_norm_stderr\": 0.0387813988879761\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.59,\n \"acc_stderr\": 0.049431107042371025,\n \"acc_norm\": 0.59,\n \"acc_norm_stderr\": 0.049431107042371025\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.7018867924528301,\n \"acc_stderr\": 0.028152837942493864,\n \"acc_norm\": 0.7018867924528301,\n \"acc_norm_stderr\": 0.028152837942493864\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7013888888888888,\n \"acc_stderr\": 0.03827052357950756,\n \"acc_norm\": 0.7013888888888888,\n \"acc_norm_stderr\": 0.03827052357950756\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.42,\n \"acc_stderr\": 0.049604496374885836,\n \"acc_norm\": 0.42,\n \"acc_norm_stderr\": 
0.049604496374885836\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.56,\n \"acc_stderr\": 0.049888765156985884,\n \"acc_norm\": 0.56,\n \"acc_norm_stderr\": 0.049888765156985884\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.34,\n \"acc_stderr\": 0.04760952285695235,\n \"acc_norm\": 0.34,\n \"acc_norm_stderr\": 0.04760952285695235\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6184971098265896,\n \"acc_stderr\": 0.03703851193099521,\n \"acc_norm\": 0.6184971098265896,\n \"acc_norm_stderr\": 0.03703851193099521\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.4117647058823529,\n \"acc_stderr\": 0.04897104952726367,\n \"acc_norm\": 0.4117647058823529,\n \"acc_norm_stderr\": 0.04897104952726367\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.73,\n \"acc_stderr\": 0.04461960433384739,\n \"acc_norm\": 0.73,\n \"acc_norm_stderr\": 0.04461960433384739\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.5617021276595745,\n \"acc_stderr\": 0.03243618636108101,\n \"acc_norm\": 0.5617021276595745,\n \"acc_norm_stderr\": 0.03243618636108101\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.4473684210526316,\n \"acc_stderr\": 0.04677473004491199,\n \"acc_norm\": 0.4473684210526316,\n \"acc_norm_stderr\": 0.04677473004491199\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.593103448275862,\n \"acc_stderr\": 0.04093793981266236,\n \"acc_norm\": 0.593103448275862,\n \"acc_norm_stderr\": 0.04093793981266236\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.37566137566137564,\n \"acc_stderr\": 0.02494236893115979,\n \"acc_norm\": 0.37566137566137564,\n \"acc_norm_stderr\": 0.02494236893115979\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.4523809523809524,\n \"acc_stderr\": 0.044518079590553275,\n \"acc_norm\": 0.4523809523809524,\n \"acc_norm_stderr\": 0.044518079590553275\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.37,\n \"acc_stderr\": 0.04852365870939099,\n \"acc_norm\": 0.37,\n \"acc_norm_stderr\": 0.04852365870939099\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7290322580645161,\n \"acc_stderr\": 0.025284416114900156,\n \"acc_norm\": 0.7290322580645161,\n \"acc_norm_stderr\": 0.025284416114900156\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.5172413793103449,\n \"acc_stderr\": 0.03515895551165698,\n \"acc_norm\": 0.5172413793103449,\n \"acc_norm_stderr\": 0.03515895551165698\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.69,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.69,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7575757575757576,\n \"acc_stderr\": 0.03346409881055953,\n \"acc_norm\": 0.7575757575757576,\n \"acc_norm_stderr\": 0.03346409881055953\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.803030303030303,\n \"acc_stderr\": 0.028335609732463362,\n \"acc_norm\": 0.803030303030303,\n \"acc_norm_stderr\": 0.028335609732463362\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.8497409326424871,\n \"acc_stderr\": 0.02578772318072388,\n \"acc_norm\": 0.8497409326424871,\n \"acc_norm_stderr\": 0.02578772318072388\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.6256410256410256,\n \"acc_stderr\": 
0.024537591572830506,\n \"acc_norm\": 0.6256410256410256,\n \"acc_norm_stderr\": 0.024537591572830506\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.362962962962963,\n \"acc_stderr\": 0.02931820364520686,\n \"acc_norm\": 0.362962962962963,\n \"acc_norm_stderr\": 0.02931820364520686\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.6554621848739496,\n \"acc_stderr\": 0.030868682604121622,\n \"acc_norm\": 0.6554621848739496,\n \"acc_norm_stderr\": 0.030868682604121622\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.37748344370860926,\n \"acc_stderr\": 0.03958027231121569,\n \"acc_norm\": 0.37748344370860926,\n \"acc_norm_stderr\": 0.03958027231121569\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8110091743119267,\n \"acc_stderr\": 0.016785481159203624,\n \"acc_norm\": 0.8110091743119267,\n \"acc_norm_stderr\": 0.016785481159203624\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.5324074074074074,\n \"acc_stderr\": 0.03402801581358966,\n \"acc_norm\": 0.5324074074074074,\n \"acc_norm_stderr\": 0.03402801581358966\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.7647058823529411,\n \"acc_stderr\": 0.029771775228145628,\n \"acc_norm\": 0.7647058823529411,\n \"acc_norm_stderr\": 0.029771775228145628\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.7468354430379747,\n \"acc_stderr\": 0.028304657943035286,\n \"acc_norm\": 0.7468354430379747,\n \"acc_norm_stderr\": 0.028304657943035286\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6860986547085202,\n \"acc_stderr\": 0.031146796482972465,\n \"acc_norm\": 0.6860986547085202,\n \"acc_norm_stderr\": 0.031146796482972465\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.7175572519083969,\n \"acc_stderr\": 0.03948406125768361,\n \"acc_norm\": 0.7175572519083969,\n \"acc_norm_stderr\": 0.03948406125768361\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.8016528925619835,\n \"acc_stderr\": 0.036401182719909456,\n \"acc_norm\": 0.8016528925619835,\n \"acc_norm_stderr\": 0.036401182719909456\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.6944444444444444,\n \"acc_stderr\": 0.04453197507374984,\n \"acc_norm\": 0.6944444444444444,\n \"acc_norm_stderr\": 0.04453197507374984\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7484662576687117,\n \"acc_stderr\": 0.03408997886857529,\n \"acc_norm\": 0.7484662576687117,\n \"acc_norm_stderr\": 0.03408997886857529\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.44642857142857145,\n \"acc_stderr\": 0.047184714852195886,\n \"acc_norm\": 0.44642857142857145,\n \"acc_norm_stderr\": 0.047184714852195886\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7475728155339806,\n \"acc_stderr\": 0.04301250399690878,\n \"acc_norm\": 0.7475728155339806,\n \"acc_norm_stderr\": 0.04301250399690878\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8632478632478633,\n \"acc_stderr\": 0.022509033937077802,\n \"acc_norm\": 0.8632478632478633,\n \"acc_norm_stderr\": 0.022509033937077802\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.68,\n \"acc_stderr\": 0.04688261722621504,\n \"acc_norm\": 0.68,\n \"acc_norm_stderr\": 0.04688261722621504\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.7943805874840357,\n \"acc_stderr\": 0.01445250045678583,\n \"acc_norm\": 0.7943805874840357,\n 
\"acc_norm_stderr\": 0.01445250045678583\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.6705202312138728,\n \"acc_stderr\": 0.025305258131879702,\n \"acc_norm\": 0.6705202312138728,\n \"acc_norm_stderr\": 0.025305258131879702\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.48268156424581005,\n \"acc_stderr\": 0.016712467441702517,\n \"acc_norm\": 0.48268156424581005,\n \"acc_norm_stderr\": 0.016712467441702517\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.6928104575163399,\n \"acc_stderr\": 0.02641560191438899,\n \"acc_norm\": 0.6928104575163399,\n \"acc_norm_stderr\": 0.02641560191438899\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.6688102893890675,\n \"acc_stderr\": 0.02673062072800491,\n \"acc_norm\": 0.6688102893890675,\n \"acc_norm_stderr\": 0.02673062072800491\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.6759259259259259,\n \"acc_stderr\": 0.02604176620271716,\n \"acc_norm\": 0.6759259259259259,\n \"acc_norm_stderr\": 0.02604176620271716\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.46099290780141844,\n \"acc_stderr\": 0.029736592526424438,\n \"acc_norm\": 0.46099290780141844,\n \"acc_norm_stderr\": 0.029736592526424438\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.4406779661016949,\n \"acc_stderr\": 0.012680037994097074,\n \"acc_norm\": 0.4406779661016949,\n \"acc_norm_stderr\": 0.012680037994097074\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.6323529411764706,\n \"acc_stderr\": 0.02928941340940319,\n \"acc_norm\": 0.6323529411764706,\n \"acc_norm_stderr\": 0.02928941340940319\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6225490196078431,\n \"acc_stderr\": 0.01961085147488029,\n \"acc_norm\": 0.6225490196078431,\n \"acc_norm_stderr\": 0.01961085147488029\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6818181818181818,\n \"acc_stderr\": 0.04461272175910509,\n \"acc_norm\": 0.6818181818181818,\n \"acc_norm_stderr\": 0.04461272175910509\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.7061224489795919,\n \"acc_stderr\": 0.02916273841024977,\n \"acc_norm\": 0.7061224489795919,\n \"acc_norm_stderr\": 0.02916273841024977\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.7910447761194029,\n \"acc_stderr\": 0.028748298931728655,\n \"acc_norm\": 0.7910447761194029,\n \"acc_norm_stderr\": 0.028748298931728655\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.84,\n \"acc_stderr\": 0.03684529491774708,\n \"acc_norm\": 0.84,\n \"acc_norm_stderr\": 0.03684529491774708\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.4759036144578313,\n \"acc_stderr\": 0.03887971849597264,\n \"acc_norm\": 0.4759036144578313,\n \"acc_norm_stderr\": 0.03887971849597264\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8538011695906432,\n \"acc_stderr\": 0.027097290118070806,\n \"acc_norm\": 0.8538011695906432,\n \"acc_norm_stderr\": 0.027097290118070806\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.5238678090575275,\n \"mc1_stderr\": 0.017483547156961567,\n \"mc2\": 0.6873119394140667,\n \"mc2_stderr\": 0.0149863398321527\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7916337805840569,\n \"acc_stderr\": 0.011414554399987745\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.4844579226686884,\n \"acc_stderr\": 0.013765829454512893\n }\n}\n```", "repo_url": "https://huggingface.co/deepnight-research/lil-c3po", "leaderboard_url": 
"https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_12_16T17_28_57.885828", "path": ["**/details_harness|arc:challenge|25_2023-12-16T17-28-57.885828.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-12-16T17-28-57.885828.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_12_16T17_28_57.885828", "path": ["**/details_harness|gsm8k|5_2023-12-16T17-28-57.885828.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-12-16T17-28-57.885828.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_12_16T17_28_57.885828", "path": ["**/details_harness|hellaswag|10_2023-12-16T17-28-57.885828.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-12-16T17-28-57.885828.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_12_16T17_28_57.885828", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T17-28-57.885828.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-12-16T17-28-57.885828.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-12-16T17-28-57.885828.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T17-28-57.885828.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T17-28-57.885828.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-12-16T17-28-57.885828.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T17-28-57.885828.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T17-28-57.885828.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T17-28-57.885828.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T17-28-57.885828.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-12-16T17-28-57.885828.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-12-16T17-28-57.885828.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T17-28-57.885828.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-12-16T17-28-57.885828.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T17-28-57.885828.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T17-28-57.885828.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T17-28-57.885828.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-12-16T17-28-57.885828.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T17-28-57.885828.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T17-28-57.885828.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T17-28-57.885828.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T17-28-57.885828.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T17-28-57.885828.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T17-28-57.885828.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T17-28-57.885828.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T17-28-57.885828.parquet", 
"**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T17-28-57.885828.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T17-28-57.885828.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T17-28-57.885828.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T17-28-57.885828.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T17-28-57.885828.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T17-28-57.885828.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-12-16T17-28-57.885828.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T17-28-57.885828.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-12-16T17-28-57.885828.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T17-28-57.885828.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T17-28-57.885828.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T17-28-57.885828.parquet", "**/details_harness|hendrycksTest-management|5_2023-12-16T17-28-57.885828.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-12-16T17-28-57.885828.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T17-28-57.885828.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T17-28-57.885828.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T17-28-57.885828.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T17-28-57.885828.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-12-16T17-28-57.885828.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-12-16T17-28-57.885828.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-12-16T17-28-57.885828.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T17-28-57.885828.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-12-16T17-28-57.885828.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T17-28-57.885828.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T17-28-57.885828.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-12-16T17-28-57.885828.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-12-16T17-28-57.885828.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-12-16T17-28-57.885828.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T17-28-57.885828.parquet", "**/details_harness|hendrycksTest-virology|5_2023-12-16T17-28-57.885828.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-12-16T17-28-57.885828.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T17-28-57.885828.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-12-16T17-28-57.885828.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-12-16T17-28-57.885828.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T17-28-57.885828.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T17-28-57.885828.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-12-16T17-28-57.885828.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T17-28-57.885828.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T17-28-57.885828.parquet", 
"**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T17-28-57.885828.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T17-28-57.885828.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-12-16T17-28-57.885828.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-12-16T17-28-57.885828.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T17-28-57.885828.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-12-16T17-28-57.885828.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T17-28-57.885828.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T17-28-57.885828.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T17-28-57.885828.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-12-16T17-28-57.885828.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T17-28-57.885828.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T17-28-57.885828.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T17-28-57.885828.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T17-28-57.885828.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T17-28-57.885828.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T17-28-57.885828.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T17-28-57.885828.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T17-28-57.885828.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T17-28-57.885828.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T17-28-57.885828.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T17-28-57.885828.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T17-28-57.885828.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T17-28-57.885828.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T17-28-57.885828.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-12-16T17-28-57.885828.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T17-28-57.885828.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-12-16T17-28-57.885828.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T17-28-57.885828.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T17-28-57.885828.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T17-28-57.885828.parquet", "**/details_harness|hendrycksTest-management|5_2023-12-16T17-28-57.885828.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-12-16T17-28-57.885828.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T17-28-57.885828.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T17-28-57.885828.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T17-28-57.885828.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T17-28-57.885828.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-12-16T17-28-57.885828.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-12-16T17-28-57.885828.parquet", 
"**/details_harness|hendrycksTest-prehistory|5_2023-12-16T17-28-57.885828.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T17-28-57.885828.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-12-16T17-28-57.885828.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T17-28-57.885828.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T17-28-57.885828.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-12-16T17-28-57.885828.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-12-16T17-28-57.885828.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-12-16T17-28-57.885828.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T17-28-57.885828.parquet", "**/details_harness|hendrycksTest-virology|5_2023-12-16T17-28-57.885828.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-12-16T17-28-57.885828.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_12_16T17_28_57.885828", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T17-28-57.885828.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T17-28-57.885828.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_12_16T17_28_57.885828", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-12-16T17-28-57.885828.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-12-16T17-28-57.885828.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_12_16T17_28_57.885828", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-12-16T17-28-57.885828.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-12-16T17-28-57.885828.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_12_16T17_28_57.885828", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T17-28-57.885828.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T17-28-57.885828.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_12_16T17_28_57.885828", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T17-28-57.885828.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T17-28-57.885828.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_12_16T17_28_57.885828", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-12-16T17-28-57.885828.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-12-16T17-28-57.885828.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_12_16T17_28_57.885828", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T17-28-57.885828.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T17-28-57.885828.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_12_16T17_28_57.885828", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T17-28-57.885828.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T17-28-57.885828.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_12_16T17_28_57.885828", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T17-28-57.885828.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T17-28-57.885828.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_12_16T17_28_57.885828", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T17-28-57.885828.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T17-28-57.885828.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_12_16T17_28_57.885828", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-12-16T17-28-57.885828.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-12-16T17-28-57.885828.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_12_16T17_28_57.885828", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-12-16T17-28-57.885828.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-12-16T17-28-57.885828.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_12_16T17_28_57.885828", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T17-28-57.885828.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T17-28-57.885828.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_12_16T17_28_57.885828", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-12-16T17-28-57.885828.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-12-16T17-28-57.885828.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_12_16T17_28_57.885828", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T17-28-57.885828.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T17-28-57.885828.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_12_16T17_28_57.885828", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T17-28-57.885828.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T17-28-57.885828.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_12_16T17_28_57.885828", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T17-28-57.885828.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T17-28-57.885828.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_12_16T17_28_57.885828", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-12-16T17-28-57.885828.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-12-16T17-28-57.885828.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_12_16T17_28_57.885828", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T17-28-57.885828.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T17-28-57.885828.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_12_16T17_28_57.885828", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T17-28-57.885828.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T17-28-57.885828.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_12_16T17_28_57.885828", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T17-28-57.885828.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T17-28-57.885828.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_12_16T17_28_57.885828", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T17-28-57.885828.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T17-28-57.885828.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_12_16T17_28_57.885828", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T17-28-57.885828.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T17-28-57.885828.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_12_16T17_28_57.885828", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T17-28-57.885828.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T17-28-57.885828.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_12_16T17_28_57.885828", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T17-28-57.885828.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T17-28-57.885828.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_12_16T17_28_57.885828", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T17-28-57.885828.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T17-28-57.885828.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_12_16T17_28_57.885828", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T17-28-57.885828.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T17-28-57.885828.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_12_16T17_28_57.885828", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T17-28-57.885828.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T17-28-57.885828.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_12_16T17_28_57.885828", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T17-28-57.885828.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T17-28-57.885828.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_12_16T17_28_57.885828", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T17-28-57.885828.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T17-28-57.885828.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_12_16T17_28_57.885828", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T17-28-57.885828.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T17-28-57.885828.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_12_16T17_28_57.885828", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T17-28-57.885828.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T17-28-57.885828.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_12_16T17_28_57.885828", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-12-16T17-28-57.885828.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-12-16T17-28-57.885828.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_12_16T17_28_57.885828", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T17-28-57.885828.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T17-28-57.885828.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_12_16T17_28_57.885828", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-12-16T17-28-57.885828.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-12-16T17-28-57.885828.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_12_16T17_28_57.885828", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T17-28-57.885828.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T17-28-57.885828.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_12_16T17_28_57.885828", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T17-28-57.885828.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T17-28-57.885828.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_12_16T17_28_57.885828", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T17-28-57.885828.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T17-28-57.885828.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_12_16T17_28_57.885828", "path": ["**/details_harness|hendrycksTest-management|5_2023-12-16T17-28-57.885828.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-12-16T17-28-57.885828.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_12_16T17_28_57.885828", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-12-16T17-28-57.885828.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-12-16T17-28-57.885828.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_12_16T17_28_57.885828", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T17-28-57.885828.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T17-28-57.885828.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_12_16T17_28_57.885828", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T17-28-57.885828.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T17-28-57.885828.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_12_16T17_28_57.885828", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T17-28-57.885828.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T17-28-57.885828.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_12_16T17_28_57.885828", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T17-28-57.885828.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T17-28-57.885828.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_12_16T17_28_57.885828", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-12-16T17-28-57.885828.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-12-16T17-28-57.885828.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_12_16T17_28_57.885828", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-12-16T17-28-57.885828.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-12-16T17-28-57.885828.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_12_16T17_28_57.885828", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-12-16T17-28-57.885828.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-12-16T17-28-57.885828.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_12_16T17_28_57.885828", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T17-28-57.885828.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T17-28-57.885828.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_12_16T17_28_57.885828", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-12-16T17-28-57.885828.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2023-12-16T17-28-57.885828.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_12_16T17_28_57.885828", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T17-28-57.885828.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T17-28-57.885828.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_12_16T17_28_57.885828", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T17-28-57.885828.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T17-28-57.885828.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_12_16T17_28_57.885828", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-12-16T17-28-57.885828.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-12-16T17-28-57.885828.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_12_16T17_28_57.885828", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-12-16T17-28-57.885828.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-12-16T17-28-57.885828.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_12_16T17_28_57.885828", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-12-16T17-28-57.885828.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-12-16T17-28-57.885828.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_12_16T17_28_57.885828", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T17-28-57.885828.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T17-28-57.885828.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_12_16T17_28_57.885828", "path": ["**/details_harness|hendrycksTest-virology|5_2023-12-16T17-28-57.885828.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-12-16T17-28-57.885828.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_12_16T17_28_57.885828", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-12-16T17-28-57.885828.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-12-16T17-28-57.885828.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_12_16T17_28_57.885828", "path": ["**/details_harness|truthfulqa:mc|0_2023-12-16T17-28-57.885828.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-12-16T17-28-57.885828.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_12_16T17_28_57.885828", "path": ["**/details_harness|winogrande|5_2023-12-16T17-28-57.885828.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-12-16T17-28-57.885828.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_12_16T17_28_57.885828", "path": ["results_2023-12-16T17-28-57.885828.parquet"]}, {"split": "latest", "path": 
["results_2023-12-16T17-28-57.885828.parquet"]}]}]}
2023-12-16T17:32:28+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of deepnight-research/lil-c3po Dataset automatically created during the evaluation run of model deepnight-research/lil-c3po on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-12-16T17:28:57.885828 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
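The card text above says "To load the details from a run, you can for instance do the following:" but the snippet itself was dropped in this flattened copy. A minimal sketch, assuming the repository follows the leaderboard's usual `details_<org>__<model>` naming (so `open-llm-leaderboard/details_deepnight-research__lil-c3po` here) and using the `harness_winogrande_5` config listed in the metadata above:

```python
from datasets import load_dataset

# Sketch only: the repository name is inferred from the leaderboard's
# details_<org>__<model> naming pattern, not quoted from this card.
data = load_dataset(
    "open-llm-leaderboard/details_deepnight-research__lil-c3po",
    "harness_winogrande_5",
    split="train",  # "train" always points to the latest results
)
```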
[ "# Dataset Card for Evaluation run of deepnight-research/lil-c3po\n\n\n\nDataset automatically created during the evaluation run of model deepnight-research/lil-c3po on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-12-16T17:28:57.885828(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of deepnight-research/lil-c3po\n\n\n\nDataset automatically created during the evaluation run of model deepnight-research/lil-c3po on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-12-16T17:28:57.885828(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 183, 66, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of deepnight-research/lil-c3po\n\n\n\nDataset automatically created during the evaluation run of model deepnight-research/lil-c3po on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-12-16T17:28:57.885828(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]## Dataset Card Authors [optional]## Dataset Card Contact" ]
05d980e29e3f2521c0485a862a4556dbe24d76d1
# Dataset Card for Evaluation run of rwitz2/go-bruins-v2.1 <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [rwitz2/go-bruins-v2.1](https://huggingface.co/rwitz2/go-bruins-v2.1) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_rwitz2__go-bruins-v2.1", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-12-16T18:03:44.088903](https://huggingface.co/datasets/open-llm-leaderboard/details_rwitz2__go-bruins-v2.1/blob/main/results_2023-12-16T18-03-44.088903.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.6553225925804701, "acc_stderr": 0.03195384406971471, "acc_norm": 0.6550937116146209, "acc_norm_stderr": 0.03261254123382002, "mc1": 0.5507955936352509, "mc1_stderr": 0.01741294198611529, "mc2": 0.6916071027497777, "mc2_stderr": 0.015051840495248825 }, "harness|arc:challenge|25": { "acc": 0.6885665529010239, "acc_stderr": 0.013532472099850942, "acc_norm": 0.7192832764505119, "acc_norm_stderr": 0.013131238126975574 }, "harness|hellaswag|10": { "acc": 0.7122087233618801, "acc_stderr": 0.0045180805945280195, "acc_norm": 0.8832901812387971, "acc_norm_stderr": 0.0032041800729423783 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.34, "acc_stderr": 0.04760952285695235, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695235 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.6444444444444445, "acc_stderr": 0.04135176749720386, "acc_norm": 0.6444444444444445, "acc_norm_stderr": 0.04135176749720386 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.7039473684210527, "acc_stderr": 0.03715062154998904, "acc_norm": 0.7039473684210527, "acc_norm_stderr": 0.03715062154998904 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.62, "acc_stderr": 0.048783173121456316, "acc_norm": 0.62, "acc_norm_stderr": 0.048783173121456316 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.720754716981132, "acc_stderr": 0.027611163402399715, "acc_norm": 0.720754716981132, "acc_norm_stderr": 0.027611163402399715 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.7708333333333334, "acc_stderr": 0.03514697467862388, "acc_norm": 0.7708333333333334, "acc_norm_stderr": 0.03514697467862388 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.45, "acc_stderr": 0.05, "acc_norm": 0.45, "acc_norm_stderr": 0.05 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.57, "acc_stderr": 0.04975698519562428, "acc_norm": 0.57, "acc_norm_stderr": 0.04975698519562428 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.29, "acc_stderr": 0.04560480215720684, 
"acc_norm": 0.29, "acc_norm_stderr": 0.04560480215720684 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.6936416184971098, "acc_stderr": 0.035149425512674394, "acc_norm": 0.6936416184971098, "acc_norm_stderr": 0.035149425512674394 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.46078431372549017, "acc_stderr": 0.04959859966384181, "acc_norm": 0.46078431372549017, "acc_norm_stderr": 0.04959859966384181 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.75, "acc_stderr": 0.04351941398892446, "acc_norm": 0.75, "acc_norm_stderr": 0.04351941398892446 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.5787234042553191, "acc_stderr": 0.03227834510146268, "acc_norm": 0.5787234042553191, "acc_norm_stderr": 0.03227834510146268 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.5, "acc_stderr": 0.047036043419179864, "acc_norm": 0.5, "acc_norm_stderr": 0.047036043419179864 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5793103448275863, "acc_stderr": 0.0411391498118926, "acc_norm": 0.5793103448275863, "acc_norm_stderr": 0.0411391498118926 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.42328042328042326, "acc_stderr": 0.025446365634406783, "acc_norm": 0.42328042328042326, "acc_norm_stderr": 0.025446365634406783 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.4603174603174603, "acc_stderr": 0.04458029125470973, "acc_norm": 0.4603174603174603, "acc_norm_stderr": 0.04458029125470973 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.33, "acc_stderr": 0.04725815626252604, "acc_norm": 0.33, "acc_norm_stderr": 0.04725815626252604 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.7838709677419354, "acc_stderr": 0.02341529343356853, "acc_norm": 0.7838709677419354, "acc_norm_stderr": 0.02341529343356853 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.5172413793103449, "acc_stderr": 0.035158955511656986, "acc_norm": 0.5172413793103449, "acc_norm_stderr": 0.035158955511656986 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.7, "acc_stderr": 0.046056618647183814, "acc_norm": 0.7, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.7757575757575758, "acc_stderr": 0.03256866661681102, "acc_norm": 0.7757575757575758, "acc_norm_stderr": 0.03256866661681102 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.7929292929292929, "acc_stderr": 0.028869778460267042, "acc_norm": 0.7929292929292929, "acc_norm_stderr": 0.028869778460267042 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.8963730569948186, "acc_stderr": 0.02199531196364424, "acc_norm": 0.8963730569948186, "acc_norm_stderr": 0.02199531196364424 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.6666666666666666, "acc_stderr": 0.023901157979402538, "acc_norm": 0.6666666666666666, "acc_norm_stderr": 0.023901157979402538 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.32592592592592595, "acc_stderr": 0.028578348365473082, "acc_norm": 0.32592592592592595, "acc_norm_stderr": 0.028578348365473082 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.6722689075630253, "acc_stderr": 0.03048991141767323, "acc_norm": 0.6722689075630253, "acc_norm_stderr": 0.03048991141767323 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.33774834437086093, "acc_stderr": 0.038615575462551684, "acc_norm": 0.33774834437086093, "acc_norm_stderr": 0.038615575462551684 }, 
"harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8532110091743119, "acc_stderr": 0.01517314184512625, "acc_norm": 0.8532110091743119, "acc_norm_stderr": 0.01517314184512625 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.5138888888888888, "acc_stderr": 0.034086558679777494, "acc_norm": 0.5138888888888888, "acc_norm_stderr": 0.034086558679777494 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.8284313725490197, "acc_stderr": 0.026460569561240647, "acc_norm": 0.8284313725490197, "acc_norm_stderr": 0.026460569561240647 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.7974683544303798, "acc_stderr": 0.026160568246601443, "acc_norm": 0.7974683544303798, "acc_norm_stderr": 0.026160568246601443 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.6905829596412556, "acc_stderr": 0.03102441174057221, "acc_norm": 0.6905829596412556, "acc_norm_stderr": 0.03102441174057221 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.8244274809160306, "acc_stderr": 0.033368203384760736, "acc_norm": 0.8244274809160306, "acc_norm_stderr": 0.033368203384760736 }, "harness|hendrycksTest-international_law|5": { "acc": 0.8099173553719008, "acc_stderr": 0.03581796951709282, "acc_norm": 0.8099173553719008, "acc_norm_stderr": 0.03581796951709282 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.7777777777777778, "acc_stderr": 0.0401910747255735, "acc_norm": 0.7777777777777778, "acc_norm_stderr": 0.0401910747255735 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.7914110429447853, "acc_stderr": 0.03192193448934724, "acc_norm": 0.7914110429447853, "acc_norm_stderr": 0.03192193448934724 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.4107142857142857, "acc_stderr": 0.04669510663875191, "acc_norm": 0.4107142857142857, "acc_norm_stderr": 0.04669510663875191 }, "harness|hendrycksTest-management|5": { "acc": 0.7669902912621359, "acc_stderr": 0.04185832598928315, "acc_norm": 0.7669902912621359, "acc_norm_stderr": 0.04185832598928315 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8888888888888888, "acc_stderr": 0.020588491316092375, "acc_norm": 0.8888888888888888, "acc_norm_stderr": 0.020588491316092375 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.71, "acc_stderr": 0.045604802157206845, "acc_norm": 0.71, "acc_norm_stderr": 0.045604802157206845 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.8339719029374202, "acc_stderr": 0.013306478243066302, "acc_norm": 0.8339719029374202, "acc_norm_stderr": 0.013306478243066302 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.7427745664739884, "acc_stderr": 0.02353292543104429, "acc_norm": 0.7427745664739884, "acc_norm_stderr": 0.02353292543104429 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.4860335195530726, "acc_stderr": 0.01671597641074452, "acc_norm": 0.4860335195530726, "acc_norm_stderr": 0.01671597641074452 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.7124183006535948, "acc_stderr": 0.02591780611714716, "acc_norm": 0.7124183006535948, "acc_norm_stderr": 0.02591780611714716 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.7202572347266881, "acc_stderr": 0.025494259350694912, "acc_norm": 0.7202572347266881, "acc_norm_stderr": 0.025494259350694912 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.7469135802469136, "acc_stderr": 0.024191808600713, "acc_norm": 0.7469135802469136, "acc_norm_stderr": 0.024191808600713 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.4574468085106383, "acc_stderr": 0.02971928127223685, "acc_norm": 
0.4574468085106383, "acc_norm_stderr": 0.02971928127223685 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.4706649282920469, "acc_stderr": 0.012748238397365549, "acc_norm": 0.4706649282920469, "acc_norm_stderr": 0.012748238397365549 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.6911764705882353, "acc_stderr": 0.02806499816704009, "acc_norm": 0.6911764705882353, "acc_norm_stderr": 0.02806499816704009 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.6748366013071896, "acc_stderr": 0.018950886770806315, "acc_norm": 0.6748366013071896, "acc_norm_stderr": 0.018950886770806315 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6909090909090909, "acc_stderr": 0.044262946482000985, "acc_norm": 0.6909090909090909, "acc_norm_stderr": 0.044262946482000985 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.7224489795918367, "acc_stderr": 0.028666857790274648, "acc_norm": 0.7224489795918367, "acc_norm_stderr": 0.028666857790274648 }, "harness|hendrycksTest-sociology|5": { "acc": 0.8407960199004975, "acc_stderr": 0.025870646766169136, "acc_norm": 0.8407960199004975, "acc_norm_stderr": 0.025870646766169136 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.86, "acc_stderr": 0.0348735088019777, "acc_norm": 0.86, "acc_norm_stderr": 0.0348735088019777 }, "harness|hendrycksTest-virology|5": { "acc": 0.5542168674698795, "acc_stderr": 0.03869543323472101, "acc_norm": 0.5542168674698795, "acc_norm_stderr": 0.03869543323472101 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8245614035087719, "acc_stderr": 0.029170885500727665, "acc_norm": 0.8245614035087719, "acc_norm_stderr": 0.029170885500727665 }, "harness|truthfulqa:mc|0": { "mc1": 0.5507955936352509, "mc1_stderr": 0.01741294198611529, "mc2": 0.6916071027497777, "mc2_stderr": 0.015051840495248825 }, "harness|winogrande|5": { "acc": 0.8216258879242304, "acc_stderr": 0.010759352014855934 }, "harness|gsm8k|5": { "acc": 0.7043214556482184, "acc_stderr": 0.012570068947898779 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). 
--> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
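The card above explains that each config also exposes a "latest" split and that the "results" config holds the aggregated metrics. A minimal sketch of pulling only those aggregates for this run, using the repository, config, and split names given in the card and metadata above:

```python
from datasets import load_dataset

# "results" is the aggregate config; the "latest" split tracks the newest run.
results = load_dataset(
    "open-llm-leaderboard/details_rwitz2__go-bruins-v2.1",
    "results",
    split="latest",
)
print(results[0])  # aggregated metrics for the 2023-12-16T18:03:44.088903 run
```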
open-llm-leaderboard/details_rwitz2__go-bruins-v2.1
[ "region:us" ]
2023-12-16T18:06:34+00:00
{"pretty_name": "Evaluation run of rwitz2/go-bruins-v2.1", "dataset_summary": "Dataset automatically created during the evaluation run of model [rwitz2/go-bruins-v2.1](https://huggingface.co/rwitz2/go-bruins-v2.1) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_rwitz2__go-bruins-v2.1\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-12-16T18:03:44.088903](https://huggingface.co/datasets/open-llm-leaderboard/details_rwitz2__go-bruins-v2.1/blob/main/results_2023-12-16T18-03-44.088903.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6553225925804701,\n \"acc_stderr\": 0.03195384406971471,\n \"acc_norm\": 0.6550937116146209,\n \"acc_norm_stderr\": 0.03261254123382002,\n \"mc1\": 0.5507955936352509,\n \"mc1_stderr\": 0.01741294198611529,\n \"mc2\": 0.6916071027497777,\n \"mc2_stderr\": 0.015051840495248825\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.6885665529010239,\n \"acc_stderr\": 0.013532472099850942,\n \"acc_norm\": 0.7192832764505119,\n \"acc_norm_stderr\": 0.013131238126975574\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.7122087233618801,\n \"acc_stderr\": 0.0045180805945280195,\n \"acc_norm\": 0.8832901812387971,\n \"acc_norm_stderr\": 0.0032041800729423783\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.34,\n \"acc_stderr\": 0.04760952285695235,\n \"acc_norm\": 0.34,\n \"acc_norm_stderr\": 0.04760952285695235\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6444444444444445,\n \"acc_stderr\": 0.04135176749720386,\n \"acc_norm\": 0.6444444444444445,\n \"acc_norm_stderr\": 0.04135176749720386\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.7039473684210527,\n \"acc_stderr\": 0.03715062154998904,\n \"acc_norm\": 0.7039473684210527,\n \"acc_norm_stderr\": 0.03715062154998904\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.62,\n \"acc_stderr\": 0.048783173121456316,\n \"acc_norm\": 0.62,\n \"acc_norm_stderr\": 0.048783173121456316\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.720754716981132,\n \"acc_stderr\": 0.027611163402399715,\n \"acc_norm\": 0.720754716981132,\n \"acc_norm_stderr\": 0.027611163402399715\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7708333333333334,\n \"acc_stderr\": 0.03514697467862388,\n \"acc_norm\": 0.7708333333333334,\n \"acc_norm_stderr\": 0.03514697467862388\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.45,\n \"acc_stderr\": 0.05,\n \"acc_norm\": 0.45,\n \"acc_norm_stderr\": 0.05\n },\n 
\"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.57,\n \"acc_stderr\": 0.04975698519562428,\n \"acc_norm\": 0.57,\n \"acc_norm_stderr\": 0.04975698519562428\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.29,\n \"acc_stderr\": 0.04560480215720684,\n \"acc_norm\": 0.29,\n \"acc_norm_stderr\": 0.04560480215720684\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6936416184971098,\n \"acc_stderr\": 0.035149425512674394,\n \"acc_norm\": 0.6936416184971098,\n \"acc_norm_stderr\": 0.035149425512674394\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.46078431372549017,\n \"acc_stderr\": 0.04959859966384181,\n \"acc_norm\": 0.46078431372549017,\n \"acc_norm_stderr\": 0.04959859966384181\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.75,\n \"acc_stderr\": 0.04351941398892446,\n \"acc_norm\": 0.75,\n \"acc_norm_stderr\": 0.04351941398892446\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.5787234042553191,\n \"acc_stderr\": 0.03227834510146268,\n \"acc_norm\": 0.5787234042553191,\n \"acc_norm_stderr\": 0.03227834510146268\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.5,\n \"acc_stderr\": 0.047036043419179864,\n \"acc_norm\": 0.5,\n \"acc_norm_stderr\": 0.047036043419179864\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5793103448275863,\n \"acc_stderr\": 0.0411391498118926,\n \"acc_norm\": 0.5793103448275863,\n \"acc_norm_stderr\": 0.0411391498118926\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.42328042328042326,\n \"acc_stderr\": 0.025446365634406783,\n \"acc_norm\": 0.42328042328042326,\n \"acc_norm_stderr\": 0.025446365634406783\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.4603174603174603,\n \"acc_stderr\": 0.04458029125470973,\n \"acc_norm\": 0.4603174603174603,\n \"acc_norm_stderr\": 0.04458029125470973\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.33,\n \"acc_stderr\": 0.04725815626252604,\n \"acc_norm\": 0.33,\n \"acc_norm_stderr\": 0.04725815626252604\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7838709677419354,\n \"acc_stderr\": 0.02341529343356853,\n \"acc_norm\": 0.7838709677419354,\n \"acc_norm_stderr\": 0.02341529343356853\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.5172413793103449,\n \"acc_stderr\": 0.035158955511656986,\n \"acc_norm\": 0.5172413793103449,\n \"acc_norm_stderr\": 0.035158955511656986\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.7,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.7,\n \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7757575757575758,\n \"acc_stderr\": 0.03256866661681102,\n \"acc_norm\": 0.7757575757575758,\n \"acc_norm_stderr\": 0.03256866661681102\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.7929292929292929,\n \"acc_stderr\": 0.028869778460267042,\n \"acc_norm\": 0.7929292929292929,\n \"acc_norm_stderr\": 0.028869778460267042\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.8963730569948186,\n \"acc_stderr\": 0.02199531196364424,\n \"acc_norm\": 0.8963730569948186,\n \"acc_norm_stderr\": 0.02199531196364424\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.6666666666666666,\n \"acc_stderr\": 0.023901157979402538,\n \"acc_norm\": 0.6666666666666666,\n 
\"acc_norm_stderr\": 0.023901157979402538\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.32592592592592595,\n \"acc_stderr\": 0.028578348365473082,\n \"acc_norm\": 0.32592592592592595,\n \"acc_norm_stderr\": 0.028578348365473082\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.6722689075630253,\n \"acc_stderr\": 0.03048991141767323,\n \"acc_norm\": 0.6722689075630253,\n \"acc_norm_stderr\": 0.03048991141767323\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.33774834437086093,\n \"acc_stderr\": 0.038615575462551684,\n \"acc_norm\": 0.33774834437086093,\n \"acc_norm_stderr\": 0.038615575462551684\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8532110091743119,\n \"acc_stderr\": 0.01517314184512625,\n \"acc_norm\": 0.8532110091743119,\n \"acc_norm_stderr\": 0.01517314184512625\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.5138888888888888,\n \"acc_stderr\": 0.034086558679777494,\n \"acc_norm\": 0.5138888888888888,\n \"acc_norm_stderr\": 0.034086558679777494\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.8284313725490197,\n \"acc_stderr\": 0.026460569561240647,\n \"acc_norm\": 0.8284313725490197,\n \"acc_norm_stderr\": 0.026460569561240647\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.7974683544303798,\n \"acc_stderr\": 0.026160568246601443,\n \"acc_norm\": 0.7974683544303798,\n \"acc_norm_stderr\": 0.026160568246601443\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6905829596412556,\n \"acc_stderr\": 0.03102441174057221,\n \"acc_norm\": 0.6905829596412556,\n \"acc_norm_stderr\": 0.03102441174057221\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.8244274809160306,\n \"acc_stderr\": 0.033368203384760736,\n \"acc_norm\": 0.8244274809160306,\n \"acc_norm_stderr\": 0.033368203384760736\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.8099173553719008,\n \"acc_stderr\": 0.03581796951709282,\n \"acc_norm\": 0.8099173553719008,\n \"acc_norm_stderr\": 0.03581796951709282\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7777777777777778,\n \"acc_stderr\": 0.0401910747255735,\n \"acc_norm\": 0.7777777777777778,\n \"acc_norm_stderr\": 0.0401910747255735\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7914110429447853,\n \"acc_stderr\": 0.03192193448934724,\n \"acc_norm\": 0.7914110429447853,\n \"acc_norm_stderr\": 0.03192193448934724\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.4107142857142857,\n \"acc_stderr\": 0.04669510663875191,\n \"acc_norm\": 0.4107142857142857,\n \"acc_norm_stderr\": 0.04669510663875191\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7669902912621359,\n \"acc_stderr\": 0.04185832598928315,\n \"acc_norm\": 0.7669902912621359,\n \"acc_norm_stderr\": 0.04185832598928315\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8888888888888888,\n \"acc_stderr\": 0.020588491316092375,\n \"acc_norm\": 0.8888888888888888,\n \"acc_norm_stderr\": 0.020588491316092375\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.71,\n \"acc_stderr\": 0.045604802157206845,\n \"acc_norm\": 0.71,\n \"acc_norm_stderr\": 0.045604802157206845\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.8339719029374202,\n \"acc_stderr\": 0.013306478243066302,\n \"acc_norm\": 0.8339719029374202,\n \"acc_norm_stderr\": 0.013306478243066302\n },\n 
\"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.7427745664739884,\n \"acc_stderr\": 0.02353292543104429,\n \"acc_norm\": 0.7427745664739884,\n \"acc_norm_stderr\": 0.02353292543104429\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.4860335195530726,\n \"acc_stderr\": 0.01671597641074452,\n \"acc_norm\": 0.4860335195530726,\n \"acc_norm_stderr\": 0.01671597641074452\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.7124183006535948,\n \"acc_stderr\": 0.02591780611714716,\n \"acc_norm\": 0.7124183006535948,\n \"acc_norm_stderr\": 0.02591780611714716\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.7202572347266881,\n \"acc_stderr\": 0.025494259350694912,\n \"acc_norm\": 0.7202572347266881,\n \"acc_norm_stderr\": 0.025494259350694912\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.7469135802469136,\n \"acc_stderr\": 0.024191808600713,\n \"acc_norm\": 0.7469135802469136,\n \"acc_norm_stderr\": 0.024191808600713\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.4574468085106383,\n \"acc_stderr\": 0.02971928127223685,\n \"acc_norm\": 0.4574468085106383,\n \"acc_norm_stderr\": 0.02971928127223685\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.4706649282920469,\n \"acc_stderr\": 0.012748238397365549,\n \"acc_norm\": 0.4706649282920469,\n \"acc_norm_stderr\": 0.012748238397365549\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.6911764705882353,\n \"acc_stderr\": 0.02806499816704009,\n \"acc_norm\": 0.6911764705882353,\n \"acc_norm_stderr\": 0.02806499816704009\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6748366013071896,\n \"acc_stderr\": 0.018950886770806315,\n \"acc_norm\": 0.6748366013071896,\n \"acc_norm_stderr\": 0.018950886770806315\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6909090909090909,\n \"acc_stderr\": 0.044262946482000985,\n \"acc_norm\": 0.6909090909090909,\n \"acc_norm_stderr\": 0.044262946482000985\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.7224489795918367,\n \"acc_stderr\": 0.028666857790274648,\n \"acc_norm\": 0.7224489795918367,\n \"acc_norm_stderr\": 0.028666857790274648\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8407960199004975,\n \"acc_stderr\": 0.025870646766169136,\n \"acc_norm\": 0.8407960199004975,\n \"acc_norm_stderr\": 0.025870646766169136\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.86,\n \"acc_stderr\": 0.0348735088019777,\n \"acc_norm\": 0.86,\n \"acc_norm_stderr\": 0.0348735088019777\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5542168674698795,\n \"acc_stderr\": 0.03869543323472101,\n \"acc_norm\": 0.5542168674698795,\n \"acc_norm_stderr\": 0.03869543323472101\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8245614035087719,\n \"acc_stderr\": 0.029170885500727665,\n \"acc_norm\": 0.8245614035087719,\n \"acc_norm_stderr\": 0.029170885500727665\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.5507955936352509,\n \"mc1_stderr\": 0.01741294198611529,\n \"mc2\": 0.6916071027497777,\n \"mc2_stderr\": 0.015051840495248825\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.8216258879242304,\n \"acc_stderr\": 0.010759352014855934\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.7043214556482184,\n \"acc_stderr\": 0.012570068947898779\n }\n}\n```", "repo_url": "https://huggingface.co/rwitz2/go-bruins-v2.1", "leaderboard_url": 
"https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_12_16T18_03_44.088903", "path": ["**/details_harness|arc:challenge|25_2023-12-16T18-03-44.088903.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-12-16T18-03-44.088903.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_12_16T18_03_44.088903", "path": ["**/details_harness|gsm8k|5_2023-12-16T18-03-44.088903.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-12-16T18-03-44.088903.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_12_16T18_03_44.088903", "path": ["**/details_harness|hellaswag|10_2023-12-16T18-03-44.088903.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-12-16T18-03-44.088903.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_12_16T18_03_44.088903", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T18-03-44.088903.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-12-16T18-03-44.088903.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-12-16T18-03-44.088903.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T18-03-44.088903.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T18-03-44.088903.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-12-16T18-03-44.088903.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T18-03-44.088903.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T18-03-44.088903.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T18-03-44.088903.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T18-03-44.088903.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-12-16T18-03-44.088903.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-12-16T18-03-44.088903.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T18-03-44.088903.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-12-16T18-03-44.088903.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T18-03-44.088903.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T18-03-44.088903.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T18-03-44.088903.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-12-16T18-03-44.088903.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T18-03-44.088903.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T18-03-44.088903.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T18-03-44.088903.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T18-03-44.088903.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T18-03-44.088903.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T18-03-44.088903.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T18-03-44.088903.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T18-03-44.088903.parquet", 
"**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T18-03-44.088903.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T18-03-44.088903.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T18-03-44.088903.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T18-03-44.088903.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T18-03-44.088903.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T18-03-44.088903.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-12-16T18-03-44.088903.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T18-03-44.088903.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-12-16T18-03-44.088903.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T18-03-44.088903.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T18-03-44.088903.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T18-03-44.088903.parquet", "**/details_harness|hendrycksTest-management|5_2023-12-16T18-03-44.088903.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-12-16T18-03-44.088903.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T18-03-44.088903.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T18-03-44.088903.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T18-03-44.088903.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T18-03-44.088903.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-12-16T18-03-44.088903.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-12-16T18-03-44.088903.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-12-16T18-03-44.088903.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T18-03-44.088903.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-12-16T18-03-44.088903.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T18-03-44.088903.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T18-03-44.088903.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-12-16T18-03-44.088903.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-12-16T18-03-44.088903.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-12-16T18-03-44.088903.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T18-03-44.088903.parquet", "**/details_harness|hendrycksTest-virology|5_2023-12-16T18-03-44.088903.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-12-16T18-03-44.088903.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T18-03-44.088903.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-12-16T18-03-44.088903.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-12-16T18-03-44.088903.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T18-03-44.088903.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T18-03-44.088903.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-12-16T18-03-44.088903.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T18-03-44.088903.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T18-03-44.088903.parquet", 
"**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T18-03-44.088903.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T18-03-44.088903.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-12-16T18-03-44.088903.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-12-16T18-03-44.088903.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T18-03-44.088903.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-12-16T18-03-44.088903.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T18-03-44.088903.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T18-03-44.088903.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T18-03-44.088903.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-12-16T18-03-44.088903.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T18-03-44.088903.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T18-03-44.088903.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T18-03-44.088903.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T18-03-44.088903.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T18-03-44.088903.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T18-03-44.088903.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T18-03-44.088903.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T18-03-44.088903.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T18-03-44.088903.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T18-03-44.088903.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T18-03-44.088903.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T18-03-44.088903.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T18-03-44.088903.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T18-03-44.088903.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-12-16T18-03-44.088903.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T18-03-44.088903.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-12-16T18-03-44.088903.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T18-03-44.088903.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T18-03-44.088903.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T18-03-44.088903.parquet", "**/details_harness|hendrycksTest-management|5_2023-12-16T18-03-44.088903.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-12-16T18-03-44.088903.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T18-03-44.088903.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T18-03-44.088903.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T18-03-44.088903.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T18-03-44.088903.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-12-16T18-03-44.088903.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-12-16T18-03-44.088903.parquet", 
"**/details_harness|hendrycksTest-prehistory|5_2023-12-16T18-03-44.088903.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T18-03-44.088903.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-12-16T18-03-44.088903.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T18-03-44.088903.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T18-03-44.088903.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-12-16T18-03-44.088903.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-12-16T18-03-44.088903.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-12-16T18-03-44.088903.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T18-03-44.088903.parquet", "**/details_harness|hendrycksTest-virology|5_2023-12-16T18-03-44.088903.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-12-16T18-03-44.088903.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_12_16T18_03_44.088903", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T18-03-44.088903.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T18-03-44.088903.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_12_16T18_03_44.088903", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-12-16T18-03-44.088903.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-12-16T18-03-44.088903.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_12_16T18_03_44.088903", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-12-16T18-03-44.088903.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-12-16T18-03-44.088903.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_12_16T18_03_44.088903", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T18-03-44.088903.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T18-03-44.088903.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_12_16T18_03_44.088903", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T18-03-44.088903.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T18-03-44.088903.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_12_16T18_03_44.088903", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-12-16T18-03-44.088903.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-12-16T18-03-44.088903.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_12_16T18_03_44.088903", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T18-03-44.088903.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T18-03-44.088903.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_12_16T18_03_44.088903", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T18-03-44.088903.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T18-03-44.088903.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_12_16T18_03_44.088903", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T18-03-44.088903.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T18-03-44.088903.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_12_16T18_03_44.088903", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T18-03-44.088903.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T18-03-44.088903.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_12_16T18_03_44.088903", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-12-16T18-03-44.088903.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-12-16T18-03-44.088903.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_12_16T18_03_44.088903", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-12-16T18-03-44.088903.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-12-16T18-03-44.088903.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_12_16T18_03_44.088903", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T18-03-44.088903.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T18-03-44.088903.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_12_16T18_03_44.088903", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-12-16T18-03-44.088903.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-12-16T18-03-44.088903.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_12_16T18_03_44.088903", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T18-03-44.088903.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T18-03-44.088903.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_12_16T18_03_44.088903", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T18-03-44.088903.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T18-03-44.088903.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_12_16T18_03_44.088903", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T18-03-44.088903.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T18-03-44.088903.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_12_16T18_03_44.088903", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-12-16T18-03-44.088903.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-12-16T18-03-44.088903.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_12_16T18_03_44.088903", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T18-03-44.088903.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T18-03-44.088903.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_12_16T18_03_44.088903", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T18-03-44.088903.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T18-03-44.088903.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_12_16T18_03_44.088903", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T18-03-44.088903.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T18-03-44.088903.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_12_16T18_03_44.088903", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T18-03-44.088903.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T18-03-44.088903.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_12_16T18_03_44.088903", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T18-03-44.088903.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T18-03-44.088903.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_12_16T18_03_44.088903", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T18-03-44.088903.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T18-03-44.088903.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_12_16T18_03_44.088903", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T18-03-44.088903.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T18-03-44.088903.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_12_16T18_03_44.088903", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T18-03-44.088903.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T18-03-44.088903.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_12_16T18_03_44.088903", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T18-03-44.088903.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T18-03-44.088903.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_12_16T18_03_44.088903", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T18-03-44.088903.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T18-03-44.088903.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_12_16T18_03_44.088903", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T18-03-44.088903.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T18-03-44.088903.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_12_16T18_03_44.088903", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T18-03-44.088903.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T18-03-44.088903.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_12_16T18_03_44.088903", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T18-03-44.088903.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T18-03-44.088903.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_12_16T18_03_44.088903", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T18-03-44.088903.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T18-03-44.088903.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_12_16T18_03_44.088903", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-12-16T18-03-44.088903.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-12-16T18-03-44.088903.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_12_16T18_03_44.088903", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T18-03-44.088903.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T18-03-44.088903.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_12_16T18_03_44.088903", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-12-16T18-03-44.088903.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-12-16T18-03-44.088903.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_12_16T18_03_44.088903", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T18-03-44.088903.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T18-03-44.088903.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_12_16T18_03_44.088903", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T18-03-44.088903.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T18-03-44.088903.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_12_16T18_03_44.088903", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T18-03-44.088903.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T18-03-44.088903.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_12_16T18_03_44.088903", "path": ["**/details_harness|hendrycksTest-management|5_2023-12-16T18-03-44.088903.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-12-16T18-03-44.088903.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_12_16T18_03_44.088903", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-12-16T18-03-44.088903.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-12-16T18-03-44.088903.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_12_16T18_03_44.088903", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T18-03-44.088903.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T18-03-44.088903.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_12_16T18_03_44.088903", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T18-03-44.088903.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T18-03-44.088903.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_12_16T18_03_44.088903", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T18-03-44.088903.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T18-03-44.088903.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_12_16T18_03_44.088903", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T18-03-44.088903.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T18-03-44.088903.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_12_16T18_03_44.088903", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-12-16T18-03-44.088903.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-12-16T18-03-44.088903.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_12_16T18_03_44.088903", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-12-16T18-03-44.088903.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-12-16T18-03-44.088903.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_12_16T18_03_44.088903", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-12-16T18-03-44.088903.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-12-16T18-03-44.088903.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_12_16T18_03_44.088903", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T18-03-44.088903.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T18-03-44.088903.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_12_16T18_03_44.088903", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-12-16T18-03-44.088903.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2023-12-16T18-03-44.088903.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_12_16T18_03_44.088903", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T18-03-44.088903.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T18-03-44.088903.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_12_16T18_03_44.088903", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T18-03-44.088903.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T18-03-44.088903.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_12_16T18_03_44.088903", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-12-16T18-03-44.088903.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-12-16T18-03-44.088903.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_12_16T18_03_44.088903", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-12-16T18-03-44.088903.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-12-16T18-03-44.088903.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_12_16T18_03_44.088903", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-12-16T18-03-44.088903.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-12-16T18-03-44.088903.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_12_16T18_03_44.088903", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T18-03-44.088903.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T18-03-44.088903.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_12_16T18_03_44.088903", "path": ["**/details_harness|hendrycksTest-virology|5_2023-12-16T18-03-44.088903.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-12-16T18-03-44.088903.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_12_16T18_03_44.088903", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-12-16T18-03-44.088903.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-12-16T18-03-44.088903.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_12_16T18_03_44.088903", "path": ["**/details_harness|truthfulqa:mc|0_2023-12-16T18-03-44.088903.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-12-16T18-03-44.088903.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_12_16T18_03_44.088903", "path": ["**/details_harness|winogrande|5_2023-12-16T18-03-44.088903.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-12-16T18-03-44.088903.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_12_16T18_03_44.088903", "path": ["results_2023-12-16T18-03-44.088903.parquet"]}, {"split": "latest", "path": 
["results_2023-12-16T18-03-44.088903.parquet"]}]}]}
2023-12-16T18:07:17+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of rwitz2/go-bruins-v2.1 Dataset automatically created during the evaluation run of model rwitz2/go-bruins-v2.1 on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-12-16T18:03:44.088903 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
[ "# Dataset Card for Evaluation run of rwitz2/go-bruins-v2.1\n\n\n\nDataset automatically created during the evaluation run of model rwitz2/go-bruins-v2.1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-12-16T18:03:44.088903(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of rwitz2/go-bruins-v2.1\n\n\n\nDataset automatically created during the evaluation run of model rwitz2/go-bruins-v2.1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-12-16T18:03:44.088903(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 181, 67, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of rwitz2/go-bruins-v2.1\n\n\n\nDataset automatically created during the evaluation run of model rwitz2/go-bruins-v2.1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-12-16T18:03:44.088903(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]## Dataset Card Authors [optional]## Dataset Card Contact" ]
3454ec71dbea328cf973370028e65cc08c30ab87
# Dataset Card for Evaluation run of steve-cse/MelloGPT <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [steve-cse/MelloGPT](https://huggingface.co/steve-cse/MelloGPT) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_steve-cse__MelloGPT", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-12-16T18:07:59.704375](https://huggingface.co/datasets/open-llm-leaderboard/details_steve-cse__MelloGPT/blob/main/results_2023-12-16T18-07-59.704375.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.5580941639624716, "acc_stderr": 0.03400113351875304, "acc_norm": 0.5629530798562845, "acc_norm_stderr": 0.03471812426856797, "mc1": 0.38555691554467564, "mc1_stderr": 0.017038839010591673, "mc2": 0.5560955080575972, "mc2_stderr": 0.014788612119825833 }, "harness|arc:challenge|25": { "acc": 0.5247440273037542, "acc_stderr": 0.014593487694937738, "acc_norm": 0.53839590443686, "acc_norm_stderr": 0.01456824555029636 }, "harness|hellaswag|10": { "acc": 0.5646285600477993, "acc_stderr": 0.004947922692688834, "acc_norm": 0.761202947619996, "acc_norm_stderr": 0.004254771367531346 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.39, "acc_stderr": 0.04902071300001975, "acc_norm": 0.39, "acc_norm_stderr": 0.04902071300001975 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.4074074074074074, "acc_stderr": 0.04244633238353229, "acc_norm": 0.4074074074074074, "acc_norm_stderr": 0.04244633238353229 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.5460526315789473, "acc_stderr": 0.04051646342874142, "acc_norm": 0.5460526315789473, "acc_norm_stderr": 0.04051646342874142 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.49, "acc_stderr": 0.05024183937956911, "acc_norm": 0.49, "acc_norm_stderr": 0.05024183937956911 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.6113207547169811, "acc_stderr": 0.030000485448675986, "acc_norm": 0.6113207547169811, "acc_norm_stderr": 0.030000485448675986 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.6041666666666666, "acc_stderr": 0.04089465449325582, "acc_norm": 0.6041666666666666, "acc_norm_stderr": 0.04089465449325582 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.4, "acc_stderr": 0.049236596391733084, "acc_norm": 0.4, "acc_norm_stderr": 0.049236596391733084 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.52, "acc_stderr": 0.050211673156867795, "acc_norm": 0.52, "acc_norm_stderr": 0.050211673156867795 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.38, "acc_stderr": 
0.048783173121456316, "acc_norm": 0.38, "acc_norm_stderr": 0.048783173121456316 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.5549132947976878, "acc_stderr": 0.03789401760283648, "acc_norm": 0.5549132947976878, "acc_norm_stderr": 0.03789401760283648 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.30392156862745096, "acc_stderr": 0.045766654032077636, "acc_norm": 0.30392156862745096, "acc_norm_stderr": 0.045766654032077636 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.7, "acc_stderr": 0.046056618647183814, "acc_norm": 0.7, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.48936170212765956, "acc_stderr": 0.03267862331014063, "acc_norm": 0.48936170212765956, "acc_norm_stderr": 0.03267862331014063 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.3508771929824561, "acc_stderr": 0.04489539350270699, "acc_norm": 0.3508771929824561, "acc_norm_stderr": 0.04489539350270699 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5586206896551724, "acc_stderr": 0.04137931034482757, "acc_norm": 0.5586206896551724, "acc_norm_stderr": 0.04137931034482757 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.3783068783068783, "acc_stderr": 0.024976954053155254, "acc_norm": 0.3783068783068783, "acc_norm_stderr": 0.024976954053155254 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.36507936507936506, "acc_stderr": 0.043062412591271526, "acc_norm": 0.36507936507936506, "acc_norm_stderr": 0.043062412591271526 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.33, "acc_stderr": 0.047258156262526045, "acc_norm": 0.33, "acc_norm_stderr": 0.047258156262526045 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.6548387096774193, "acc_stderr": 0.02704574657353433, "acc_norm": 0.6548387096774193, "acc_norm_stderr": 0.02704574657353433 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.43349753694581283, "acc_stderr": 0.03486731727419872, "acc_norm": 0.43349753694581283, "acc_norm_stderr": 0.03486731727419872 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.54, "acc_stderr": 0.05009082659620332, "acc_norm": 0.54, "acc_norm_stderr": 0.05009082659620332 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.6727272727272727, "acc_stderr": 0.03663974994391244, "acc_norm": 0.6727272727272727, "acc_norm_stderr": 0.03663974994391244 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.7272727272727273, "acc_stderr": 0.03173071239071724, "acc_norm": 0.7272727272727273, "acc_norm_stderr": 0.03173071239071724 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.7409326424870466, "acc_stderr": 0.031618779179354115, "acc_norm": 0.7409326424870466, "acc_norm_stderr": 0.031618779179354115 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.5307692307692308, "acc_stderr": 0.025302958890850154, "acc_norm": 0.5307692307692308, "acc_norm_stderr": 0.025302958890850154 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.3074074074074074, "acc_stderr": 0.028133252578815635, "acc_norm": 0.3074074074074074, "acc_norm_stderr": 0.028133252578815635 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.5630252100840336, "acc_stderr": 0.03221943636566196, "acc_norm": 0.5630252100840336, "acc_norm_stderr": 0.03221943636566196 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.33774834437086093, "acc_stderr": 0.038615575462551684, "acc_norm": 0.33774834437086093, "acc_norm_stderr": 
0.038615575462551684 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.7174311926605504, "acc_stderr": 0.019304243497707152, "acc_norm": 0.7174311926605504, "acc_norm_stderr": 0.019304243497707152 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.48148148148148145, "acc_stderr": 0.03407632093854052, "acc_norm": 0.48148148148148145, "acc_norm_stderr": 0.03407632093854052 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.6764705882352942, "acc_stderr": 0.032834720561085606, "acc_norm": 0.6764705882352942, "acc_norm_stderr": 0.032834720561085606 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.679324894514768, "acc_stderr": 0.0303819319499904, "acc_norm": 0.679324894514768, "acc_norm_stderr": 0.0303819319499904 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.6367713004484304, "acc_stderr": 0.032277904428505, "acc_norm": 0.6367713004484304, "acc_norm_stderr": 0.032277904428505 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.7251908396946565, "acc_stderr": 0.03915345408847834, "acc_norm": 0.7251908396946565, "acc_norm_stderr": 0.03915345408847834 }, "harness|hendrycksTest-international_law|5": { "acc": 0.6363636363636364, "acc_stderr": 0.043913262867240704, "acc_norm": 0.6363636363636364, "acc_norm_stderr": 0.043913262867240704 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.6851851851851852, "acc_stderr": 0.04489931073591312, "acc_norm": 0.6851851851851852, "acc_norm_stderr": 0.04489931073591312 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.6809815950920245, "acc_stderr": 0.03661997551073836, "acc_norm": 0.6809815950920245, "acc_norm_stderr": 0.03661997551073836 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.5089285714285714, "acc_stderr": 0.04745033255489122, "acc_norm": 0.5089285714285714, "acc_norm_stderr": 0.04745033255489122 }, "harness|hendrycksTest-management|5": { "acc": 0.6699029126213593, "acc_stderr": 0.04656147110012351, "acc_norm": 0.6699029126213593, "acc_norm_stderr": 0.04656147110012351 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8632478632478633, "acc_stderr": 0.022509033937077788, "acc_norm": 0.8632478632478633, "acc_norm_stderr": 0.022509033937077788 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.67, "acc_stderr": 0.047258156262526094, "acc_norm": 0.67, "acc_norm_stderr": 0.047258156262526094 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.7471264367816092, "acc_stderr": 0.015543377313719681, "acc_norm": 0.7471264367816092, "acc_norm_stderr": 0.015543377313719681 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.5982658959537572, "acc_stderr": 0.026394104177643637, "acc_norm": 0.5982658959537572, "acc_norm_stderr": 0.026394104177643637 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.25251396648044694, "acc_stderr": 0.014530330201468647, "acc_norm": 0.25251396648044694, "acc_norm_stderr": 0.014530330201468647 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.6405228758169934, "acc_stderr": 0.027475969910660952, "acc_norm": 0.6405228758169934, "acc_norm_stderr": 0.027475969910660952 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.6141479099678456, "acc_stderr": 0.027648149599751464, "acc_norm": 0.6141479099678456, "acc_norm_stderr": 0.027648149599751464 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.5925925925925926, "acc_stderr": 0.027339546640662734, "acc_norm": 0.5925925925925926, "acc_norm_stderr": 0.027339546640662734 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.375886524822695, "acc_stderr": 
0.028893955412115882, "acc_norm": 0.375886524822695, "acc_norm_stderr": 0.028893955412115882 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.39765319426336376, "acc_stderr": 0.012499840347460642, "acc_norm": 0.39765319426336376, "acc_norm_stderr": 0.012499840347460642 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.5588235294117647, "acc_stderr": 0.030161911930767102, "acc_norm": 0.5588235294117647, "acc_norm_stderr": 0.030161911930767102 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.5277777777777778, "acc_stderr": 0.020196594933541194, "acc_norm": 0.5277777777777778, "acc_norm_stderr": 0.020196594933541194 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6545454545454545, "acc_stderr": 0.04554619617541054, "acc_norm": 0.6545454545454545, "acc_norm_stderr": 0.04554619617541054 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.6530612244897959, "acc_stderr": 0.030472526026726492, "acc_norm": 0.6530612244897959, "acc_norm_stderr": 0.030472526026726492 }, "harness|hendrycksTest-sociology|5": { "acc": 0.7611940298507462, "acc_stderr": 0.03014777593540922, "acc_norm": 0.7611940298507462, "acc_norm_stderr": 0.03014777593540922 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.79, "acc_stderr": 0.040936018074033256, "acc_norm": 0.79, "acc_norm_stderr": 0.040936018074033256 }, "harness|hendrycksTest-virology|5": { "acc": 0.5, "acc_stderr": 0.03892494720807614, "acc_norm": 0.5, "acc_norm_stderr": 0.03892494720807614 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.7309941520467836, "acc_stderr": 0.03401052620104089, "acc_norm": 0.7309941520467836, "acc_norm_stderr": 0.03401052620104089 }, "harness|truthfulqa:mc|0": { "mc1": 0.38555691554467564, "mc1_stderr": 0.017038839010591673, "mc2": 0.5560955080575972, "mc2_stderr": 0.014788612119825833 }, "harness|winogrande|5": { "acc": 0.7387529597474349, "acc_stderr": 0.012346914863415314 }, "harness|gsm8k|5": { "acc": 0.3009855951478393, "acc_stderr": 0.0126345044652112 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). 
--> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
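The card above shows how to pull a single per-task config; the aggregated "results" configuration it mentions can be inspected the same way. The sketch below uses the dataset id from the card's own loading example and assumes the same config/split layout as the previous entry (a timestamped split plus a `latest` split); the exact column layout of the results parquet is not documented in the card, so the snippet simply lists whatever columns are present.

```python
from datasets import load_dataset

# Dataset id taken from the loading example in the card above.
results = load_dataset(
    "open-llm-leaderboard/details_steve-cse__MelloGPT",
    "results",
    split="latest",
)

# The schema of the aggregated results parquet is not spelled out in the card,
# so just inspect the available columns and the aggregated row(s).
print(results.column_names)
print(results[0])
```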
open-llm-leaderboard/details_steve-cse__MelloGPT
[ "region:us" ]
2023-12-16T18:10:52+00:00
{"pretty_name": "Evaluation run of steve-cse/MelloGPT", "dataset_summary": "Dataset automatically created during the evaluation run of model [steve-cse/MelloGPT](https://huggingface.co/steve-cse/MelloGPT) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_steve-cse__MelloGPT\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-12-16T18:07:59.704375](https://huggingface.co/datasets/open-llm-leaderboard/details_steve-cse__MelloGPT/blob/main/results_2023-12-16T18-07-59.704375.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.5580941639624716,\n \"acc_stderr\": 0.03400113351875304,\n \"acc_norm\": 0.5629530798562845,\n \"acc_norm_stderr\": 0.03471812426856797,\n \"mc1\": 0.38555691554467564,\n \"mc1_stderr\": 0.017038839010591673,\n \"mc2\": 0.5560955080575972,\n \"mc2_stderr\": 0.014788612119825833\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.5247440273037542,\n \"acc_stderr\": 0.014593487694937738,\n \"acc_norm\": 0.53839590443686,\n \"acc_norm_stderr\": 0.01456824555029636\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.5646285600477993,\n \"acc_stderr\": 0.004947922692688834,\n \"acc_norm\": 0.761202947619996,\n \"acc_norm_stderr\": 0.004254771367531346\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.39,\n \"acc_stderr\": 0.04902071300001975,\n \"acc_norm\": 0.39,\n \"acc_norm_stderr\": 0.04902071300001975\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.4074074074074074,\n \"acc_stderr\": 0.04244633238353229,\n \"acc_norm\": 0.4074074074074074,\n \"acc_norm_stderr\": 0.04244633238353229\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.5460526315789473,\n \"acc_stderr\": 0.04051646342874142,\n \"acc_norm\": 0.5460526315789473,\n \"acc_norm_stderr\": 0.04051646342874142\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.49,\n \"acc_stderr\": 0.05024183937956911,\n \"acc_norm\": 0.49,\n \"acc_norm_stderr\": 0.05024183937956911\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.6113207547169811,\n \"acc_stderr\": 0.030000485448675986,\n \"acc_norm\": 0.6113207547169811,\n \"acc_norm_stderr\": 0.030000485448675986\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.6041666666666666,\n \"acc_stderr\": 0.04089465449325582,\n \"acc_norm\": 0.6041666666666666,\n \"acc_norm_stderr\": 0.04089465449325582\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.4,\n \"acc_stderr\": 0.049236596391733084,\n \"acc_norm\": 0.4,\n \"acc_norm_stderr\": 0.049236596391733084\n 
},\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.52,\n \"acc_stderr\": 0.050211673156867795,\n \"acc_norm\": 0.52,\n \"acc_norm_stderr\": 0.050211673156867795\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.38,\n \"acc_stderr\": 0.048783173121456316,\n \"acc_norm\": 0.38,\n \"acc_norm_stderr\": 0.048783173121456316\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.5549132947976878,\n \"acc_stderr\": 0.03789401760283648,\n \"acc_norm\": 0.5549132947976878,\n \"acc_norm_stderr\": 0.03789401760283648\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.30392156862745096,\n \"acc_stderr\": 0.045766654032077636,\n \"acc_norm\": 0.30392156862745096,\n \"acc_norm_stderr\": 0.045766654032077636\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.7,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.7,\n \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.48936170212765956,\n \"acc_stderr\": 0.03267862331014063,\n \"acc_norm\": 0.48936170212765956,\n \"acc_norm_stderr\": 0.03267862331014063\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.3508771929824561,\n \"acc_stderr\": 0.04489539350270699,\n \"acc_norm\": 0.3508771929824561,\n \"acc_norm_stderr\": 0.04489539350270699\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5586206896551724,\n \"acc_stderr\": 0.04137931034482757,\n \"acc_norm\": 0.5586206896551724,\n \"acc_norm_stderr\": 0.04137931034482757\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.3783068783068783,\n \"acc_stderr\": 0.024976954053155254,\n \"acc_norm\": 0.3783068783068783,\n \"acc_norm_stderr\": 0.024976954053155254\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.36507936507936506,\n \"acc_stderr\": 0.043062412591271526,\n \"acc_norm\": 0.36507936507936506,\n \"acc_norm_stderr\": 0.043062412591271526\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.33,\n \"acc_stderr\": 0.047258156262526045,\n \"acc_norm\": 0.33,\n \"acc_norm_stderr\": 0.047258156262526045\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.6548387096774193,\n \"acc_stderr\": 0.02704574657353433,\n \"acc_norm\": 0.6548387096774193,\n \"acc_norm_stderr\": 0.02704574657353433\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.43349753694581283,\n \"acc_stderr\": 0.03486731727419872,\n \"acc_norm\": 0.43349753694581283,\n \"acc_norm_stderr\": 0.03486731727419872\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.54,\n \"acc_stderr\": 0.05009082659620332,\n \"acc_norm\": 0.54,\n \"acc_norm_stderr\": 0.05009082659620332\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.6727272727272727,\n \"acc_stderr\": 0.03663974994391244,\n \"acc_norm\": 0.6727272727272727,\n \"acc_norm_stderr\": 0.03663974994391244\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.7272727272727273,\n \"acc_stderr\": 0.03173071239071724,\n \"acc_norm\": 0.7272727272727273,\n \"acc_norm_stderr\": 0.03173071239071724\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.7409326424870466,\n \"acc_stderr\": 0.031618779179354115,\n \"acc_norm\": 0.7409326424870466,\n \"acc_norm_stderr\": 0.031618779179354115\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.5307692307692308,\n \"acc_stderr\": 0.025302958890850154,\n 
\"acc_norm\": 0.5307692307692308,\n \"acc_norm_stderr\": 0.025302958890850154\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.3074074074074074,\n \"acc_stderr\": 0.028133252578815635,\n \"acc_norm\": 0.3074074074074074,\n \"acc_norm_stderr\": 0.028133252578815635\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.5630252100840336,\n \"acc_stderr\": 0.03221943636566196,\n \"acc_norm\": 0.5630252100840336,\n \"acc_norm_stderr\": 0.03221943636566196\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.33774834437086093,\n \"acc_stderr\": 0.038615575462551684,\n \"acc_norm\": 0.33774834437086093,\n \"acc_norm_stderr\": 0.038615575462551684\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.7174311926605504,\n \"acc_stderr\": 0.019304243497707152,\n \"acc_norm\": 0.7174311926605504,\n \"acc_norm_stderr\": 0.019304243497707152\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.48148148148148145,\n \"acc_stderr\": 0.03407632093854052,\n \"acc_norm\": 0.48148148148148145,\n \"acc_norm_stderr\": 0.03407632093854052\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.6764705882352942,\n \"acc_stderr\": 0.032834720561085606,\n \"acc_norm\": 0.6764705882352942,\n \"acc_norm_stderr\": 0.032834720561085606\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.679324894514768,\n \"acc_stderr\": 0.0303819319499904,\n \"acc_norm\": 0.679324894514768,\n \"acc_norm_stderr\": 0.0303819319499904\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6367713004484304,\n \"acc_stderr\": 0.032277904428505,\n \"acc_norm\": 0.6367713004484304,\n \"acc_norm_stderr\": 0.032277904428505\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.7251908396946565,\n \"acc_stderr\": 0.03915345408847834,\n \"acc_norm\": 0.7251908396946565,\n \"acc_norm_stderr\": 0.03915345408847834\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.6363636363636364,\n \"acc_stderr\": 0.043913262867240704,\n \"acc_norm\": 0.6363636363636364,\n \"acc_norm_stderr\": 0.043913262867240704\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.6851851851851852,\n \"acc_stderr\": 0.04489931073591312,\n \"acc_norm\": 0.6851851851851852,\n \"acc_norm_stderr\": 0.04489931073591312\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.6809815950920245,\n \"acc_stderr\": 0.03661997551073836,\n \"acc_norm\": 0.6809815950920245,\n \"acc_norm_stderr\": 0.03661997551073836\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.5089285714285714,\n \"acc_stderr\": 0.04745033255489122,\n \"acc_norm\": 0.5089285714285714,\n \"acc_norm_stderr\": 0.04745033255489122\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.6699029126213593,\n \"acc_stderr\": 0.04656147110012351,\n \"acc_norm\": 0.6699029126213593,\n \"acc_norm_stderr\": 0.04656147110012351\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8632478632478633,\n \"acc_stderr\": 0.022509033937077788,\n \"acc_norm\": 0.8632478632478633,\n \"acc_norm_stderr\": 0.022509033937077788\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.67,\n \"acc_stderr\": 0.047258156262526094,\n \"acc_norm\": 0.67,\n \"acc_norm_stderr\": 0.047258156262526094\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.7471264367816092,\n \"acc_stderr\": 0.015543377313719681,\n \"acc_norm\": 0.7471264367816092,\n \"acc_norm_stderr\": 
0.015543377313719681\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.5982658959537572,\n \"acc_stderr\": 0.026394104177643637,\n \"acc_norm\": 0.5982658959537572,\n \"acc_norm_stderr\": 0.026394104177643637\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.25251396648044694,\n \"acc_stderr\": 0.014530330201468647,\n \"acc_norm\": 0.25251396648044694,\n \"acc_norm_stderr\": 0.014530330201468647\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.6405228758169934,\n \"acc_stderr\": 0.027475969910660952,\n \"acc_norm\": 0.6405228758169934,\n \"acc_norm_stderr\": 0.027475969910660952\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.6141479099678456,\n \"acc_stderr\": 0.027648149599751464,\n \"acc_norm\": 0.6141479099678456,\n \"acc_norm_stderr\": 0.027648149599751464\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.5925925925925926,\n \"acc_stderr\": 0.027339546640662734,\n \"acc_norm\": 0.5925925925925926,\n \"acc_norm_stderr\": 0.027339546640662734\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.375886524822695,\n \"acc_stderr\": 0.028893955412115882,\n \"acc_norm\": 0.375886524822695,\n \"acc_norm_stderr\": 0.028893955412115882\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.39765319426336376,\n \"acc_stderr\": 0.012499840347460642,\n \"acc_norm\": 0.39765319426336376,\n \"acc_norm_stderr\": 0.012499840347460642\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.5588235294117647,\n \"acc_stderr\": 0.030161911930767102,\n \"acc_norm\": 0.5588235294117647,\n \"acc_norm_stderr\": 0.030161911930767102\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.5277777777777778,\n \"acc_stderr\": 0.020196594933541194,\n \"acc_norm\": 0.5277777777777778,\n \"acc_norm_stderr\": 0.020196594933541194\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6545454545454545,\n \"acc_stderr\": 0.04554619617541054,\n \"acc_norm\": 0.6545454545454545,\n \"acc_norm_stderr\": 0.04554619617541054\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.6530612244897959,\n \"acc_stderr\": 0.030472526026726492,\n \"acc_norm\": 0.6530612244897959,\n \"acc_norm_stderr\": 0.030472526026726492\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.7611940298507462,\n \"acc_stderr\": 0.03014777593540922,\n \"acc_norm\": 0.7611940298507462,\n \"acc_norm_stderr\": 0.03014777593540922\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.79,\n \"acc_stderr\": 0.040936018074033256,\n \"acc_norm\": 0.79,\n \"acc_norm_stderr\": 0.040936018074033256\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5,\n \"acc_stderr\": 0.03892494720807614,\n \"acc_norm\": 0.5,\n \"acc_norm_stderr\": 0.03892494720807614\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.7309941520467836,\n \"acc_stderr\": 0.03401052620104089,\n \"acc_norm\": 0.7309941520467836,\n \"acc_norm_stderr\": 0.03401052620104089\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.38555691554467564,\n \"mc1_stderr\": 0.017038839010591673,\n \"mc2\": 0.5560955080575972,\n \"mc2_stderr\": 0.014788612119825833\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7387529597474349,\n \"acc_stderr\": 0.012346914863415314\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.3009855951478393,\n \"acc_stderr\": 0.0126345044652112\n }\n}\n```", "repo_url": "https://huggingface.co/steve-cse/MelloGPT", "leaderboard_url": 
"https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_12_16T18_07_59.704375", "path": ["**/details_harness|arc:challenge|25_2023-12-16T18-07-59.704375.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-12-16T18-07-59.704375.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_12_16T18_07_59.704375", "path": ["**/details_harness|gsm8k|5_2023-12-16T18-07-59.704375.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-12-16T18-07-59.704375.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_12_16T18_07_59.704375", "path": ["**/details_harness|hellaswag|10_2023-12-16T18-07-59.704375.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-12-16T18-07-59.704375.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_12_16T18_07_59.704375", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T18-07-59.704375.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-12-16T18-07-59.704375.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-12-16T18-07-59.704375.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T18-07-59.704375.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T18-07-59.704375.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-12-16T18-07-59.704375.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T18-07-59.704375.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T18-07-59.704375.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T18-07-59.704375.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T18-07-59.704375.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-12-16T18-07-59.704375.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-12-16T18-07-59.704375.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T18-07-59.704375.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-12-16T18-07-59.704375.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T18-07-59.704375.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T18-07-59.704375.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T18-07-59.704375.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-12-16T18-07-59.704375.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T18-07-59.704375.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T18-07-59.704375.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T18-07-59.704375.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T18-07-59.704375.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T18-07-59.704375.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T18-07-59.704375.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T18-07-59.704375.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T18-07-59.704375.parquet", 
"**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T18-07-59.704375.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T18-07-59.704375.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T18-07-59.704375.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T18-07-59.704375.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T18-07-59.704375.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T18-07-59.704375.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-12-16T18-07-59.704375.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T18-07-59.704375.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-12-16T18-07-59.704375.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T18-07-59.704375.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T18-07-59.704375.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T18-07-59.704375.parquet", "**/details_harness|hendrycksTest-management|5_2023-12-16T18-07-59.704375.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-12-16T18-07-59.704375.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T18-07-59.704375.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T18-07-59.704375.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T18-07-59.704375.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T18-07-59.704375.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-12-16T18-07-59.704375.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-12-16T18-07-59.704375.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-12-16T18-07-59.704375.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T18-07-59.704375.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-12-16T18-07-59.704375.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T18-07-59.704375.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T18-07-59.704375.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-12-16T18-07-59.704375.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-12-16T18-07-59.704375.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-12-16T18-07-59.704375.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T18-07-59.704375.parquet", "**/details_harness|hendrycksTest-virology|5_2023-12-16T18-07-59.704375.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-12-16T18-07-59.704375.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T18-07-59.704375.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-12-16T18-07-59.704375.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-12-16T18-07-59.704375.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T18-07-59.704375.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T18-07-59.704375.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-12-16T18-07-59.704375.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T18-07-59.704375.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T18-07-59.704375.parquet", 
"**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T18-07-59.704375.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T18-07-59.704375.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-12-16T18-07-59.704375.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-12-16T18-07-59.704375.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T18-07-59.704375.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-12-16T18-07-59.704375.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T18-07-59.704375.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T18-07-59.704375.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T18-07-59.704375.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-12-16T18-07-59.704375.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T18-07-59.704375.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T18-07-59.704375.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T18-07-59.704375.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T18-07-59.704375.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T18-07-59.704375.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T18-07-59.704375.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T18-07-59.704375.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T18-07-59.704375.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T18-07-59.704375.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T18-07-59.704375.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T18-07-59.704375.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T18-07-59.704375.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T18-07-59.704375.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T18-07-59.704375.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-12-16T18-07-59.704375.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T18-07-59.704375.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-12-16T18-07-59.704375.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T18-07-59.704375.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T18-07-59.704375.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T18-07-59.704375.parquet", "**/details_harness|hendrycksTest-management|5_2023-12-16T18-07-59.704375.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-12-16T18-07-59.704375.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T18-07-59.704375.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T18-07-59.704375.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T18-07-59.704375.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T18-07-59.704375.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-12-16T18-07-59.704375.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-12-16T18-07-59.704375.parquet", 
"**/details_harness|hendrycksTest-prehistory|5_2023-12-16T18-07-59.704375.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T18-07-59.704375.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-12-16T18-07-59.704375.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T18-07-59.704375.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T18-07-59.704375.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-12-16T18-07-59.704375.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-12-16T18-07-59.704375.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-12-16T18-07-59.704375.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T18-07-59.704375.parquet", "**/details_harness|hendrycksTest-virology|5_2023-12-16T18-07-59.704375.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-12-16T18-07-59.704375.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_12_16T18_07_59.704375", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T18-07-59.704375.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T18-07-59.704375.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_12_16T18_07_59.704375", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-12-16T18-07-59.704375.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-12-16T18-07-59.704375.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_12_16T18_07_59.704375", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-12-16T18-07-59.704375.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-12-16T18-07-59.704375.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_12_16T18_07_59.704375", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T18-07-59.704375.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T18-07-59.704375.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_12_16T18_07_59.704375", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T18-07-59.704375.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T18-07-59.704375.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_12_16T18_07_59.704375", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-12-16T18-07-59.704375.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-12-16T18-07-59.704375.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_12_16T18_07_59.704375", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T18-07-59.704375.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T18-07-59.704375.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_12_16T18_07_59.704375", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T18-07-59.704375.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T18-07-59.704375.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_12_16T18_07_59.704375", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T18-07-59.704375.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T18-07-59.704375.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_12_16T18_07_59.704375", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T18-07-59.704375.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T18-07-59.704375.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_12_16T18_07_59.704375", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-12-16T18-07-59.704375.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-12-16T18-07-59.704375.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_12_16T18_07_59.704375", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-12-16T18-07-59.704375.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-12-16T18-07-59.704375.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_12_16T18_07_59.704375", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T18-07-59.704375.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T18-07-59.704375.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_12_16T18_07_59.704375", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-12-16T18-07-59.704375.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-12-16T18-07-59.704375.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_12_16T18_07_59.704375", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T18-07-59.704375.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T18-07-59.704375.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_12_16T18_07_59.704375", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T18-07-59.704375.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T18-07-59.704375.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_12_16T18_07_59.704375", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T18-07-59.704375.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T18-07-59.704375.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_12_16T18_07_59.704375", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-12-16T18-07-59.704375.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-12-16T18-07-59.704375.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_12_16T18_07_59.704375", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T18-07-59.704375.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T18-07-59.704375.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_12_16T18_07_59.704375", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T18-07-59.704375.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T18-07-59.704375.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_12_16T18_07_59.704375", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T18-07-59.704375.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T18-07-59.704375.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_12_16T18_07_59.704375", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T18-07-59.704375.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T18-07-59.704375.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_12_16T18_07_59.704375", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T18-07-59.704375.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T18-07-59.704375.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_12_16T18_07_59.704375", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T18-07-59.704375.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T18-07-59.704375.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_12_16T18_07_59.704375", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T18-07-59.704375.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T18-07-59.704375.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_12_16T18_07_59.704375", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T18-07-59.704375.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T18-07-59.704375.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_12_16T18_07_59.704375", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T18-07-59.704375.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T18-07-59.704375.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_12_16T18_07_59.704375", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T18-07-59.704375.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T18-07-59.704375.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_12_16T18_07_59.704375", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T18-07-59.704375.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T18-07-59.704375.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_12_16T18_07_59.704375", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T18-07-59.704375.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T18-07-59.704375.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_12_16T18_07_59.704375", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T18-07-59.704375.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T18-07-59.704375.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_12_16T18_07_59.704375", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T18-07-59.704375.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T18-07-59.704375.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_12_16T18_07_59.704375", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-12-16T18-07-59.704375.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-12-16T18-07-59.704375.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_12_16T18_07_59.704375", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T18-07-59.704375.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T18-07-59.704375.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_12_16T18_07_59.704375", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-12-16T18-07-59.704375.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-12-16T18-07-59.704375.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_12_16T18_07_59.704375", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T18-07-59.704375.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T18-07-59.704375.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_12_16T18_07_59.704375", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T18-07-59.704375.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T18-07-59.704375.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_12_16T18_07_59.704375", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T18-07-59.704375.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T18-07-59.704375.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_12_16T18_07_59.704375", "path": ["**/details_harness|hendrycksTest-management|5_2023-12-16T18-07-59.704375.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-12-16T18-07-59.704375.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_12_16T18_07_59.704375", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-12-16T18-07-59.704375.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-12-16T18-07-59.704375.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_12_16T18_07_59.704375", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T18-07-59.704375.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T18-07-59.704375.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_12_16T18_07_59.704375", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T18-07-59.704375.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T18-07-59.704375.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_12_16T18_07_59.704375", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T18-07-59.704375.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T18-07-59.704375.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_12_16T18_07_59.704375", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T18-07-59.704375.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T18-07-59.704375.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_12_16T18_07_59.704375", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-12-16T18-07-59.704375.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-12-16T18-07-59.704375.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_12_16T18_07_59.704375", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-12-16T18-07-59.704375.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-12-16T18-07-59.704375.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_12_16T18_07_59.704375", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-12-16T18-07-59.704375.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-12-16T18-07-59.704375.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_12_16T18_07_59.704375", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T18-07-59.704375.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T18-07-59.704375.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_12_16T18_07_59.704375", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-12-16T18-07-59.704375.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2023-12-16T18-07-59.704375.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_12_16T18_07_59.704375", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T18-07-59.704375.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T18-07-59.704375.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_12_16T18_07_59.704375", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T18-07-59.704375.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T18-07-59.704375.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_12_16T18_07_59.704375", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-12-16T18-07-59.704375.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-12-16T18-07-59.704375.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_12_16T18_07_59.704375", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-12-16T18-07-59.704375.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-12-16T18-07-59.704375.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_12_16T18_07_59.704375", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-12-16T18-07-59.704375.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-12-16T18-07-59.704375.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_12_16T18_07_59.704375", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T18-07-59.704375.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T18-07-59.704375.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_12_16T18_07_59.704375", "path": ["**/details_harness|hendrycksTest-virology|5_2023-12-16T18-07-59.704375.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-12-16T18-07-59.704375.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_12_16T18_07_59.704375", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-12-16T18-07-59.704375.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-12-16T18-07-59.704375.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_12_16T18_07_59.704375", "path": ["**/details_harness|truthfulqa:mc|0_2023-12-16T18-07-59.704375.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-12-16T18-07-59.704375.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_12_16T18_07_59.704375", "path": ["**/details_harness|winogrande|5_2023-12-16T18-07-59.704375.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-12-16T18-07-59.704375.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_12_16T18_07_59.704375", "path": ["results_2023-12-16T18-07-59.704375.parquet"]}, {"split": "latest", "path": 
["results_2023-12-16T18-07-59.704375.parquet"]}]}]}
2023-12-16T18:11:31+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of steve-cse/MelloGPT Dataset automatically created during the evaluation run of model steve-cse/MelloGPT on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-12-16T18:07:59.704375 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
[ "# Dataset Card for Evaluation run of steve-cse/MelloGPT\n\n\n\nDataset automatically created during the evaluation run of model steve-cse/MelloGPT on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-12-16T18:07:59.704375(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of steve-cse/MelloGPT\n\n\n\nDataset automatically created during the evaluation run of model steve-cse/MelloGPT on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-12-16T18:07:59.704375(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 179, 67, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of steve-cse/MelloGPT\n\n\n\nDataset automatically created during the evaluation run of model steve-cse/MelloGPT on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-12-16T18:07:59.704375(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]## Dataset Card Authors [optional]## Dataset Card Contact" ]
cf54dce2c67858fcd66828ef7069f05301dc14b4
# Dataset of vigna (Arknights)

This is the dataset of vigna (Arknights), containing 210 images and their tags.

Images are crawled from many sites (e.g. danbooru, pixiv, zerochan ...); the auto-crawling system is powered by the [DeepGHS Team](https://github.com/deepghs) ([huggingface organization](https://huggingface.co/deepghs)). A WebUI that contains the crawlers and other tools is also available: ([LittleAppleWebUI](https://github.com/LittleApple-fp16/LittleAppleWebUI))

| Name            | Images | Download                                 | Description                                                                               |
|:----------------|-------:|:-----------------------------------------|:------------------------------------------------------------------------------------------|
| raw             | 210    | [Download](dataset-raw.zip)              | Raw data with meta information.                                                           |
| raw-stage3      | 570    | [Download](dataset-raw-stage3.zip)       | 3-stage cropped raw data with meta information.                                           |
| raw-stage3-eyes | 632    | [Download](dataset-raw-stage3-eyes.zip)  | 3-stage cropped (with eye-focus) raw data with meta information.                          |
| 384x512         | 210    | [Download](dataset-384x512.zip)          | 384x512 aligned dataset.                                                                  |
| 512x704         | 210    | [Download](dataset-512x704.zip)          | 512x704 aligned dataset.                                                                  |
| 640x880         | 210    | [Download](dataset-640x880.zip)          | 640x880 aligned dataset.                                                                  |
| stage3-640      | 570    | [Download](dataset-stage3-640.zip)       | 3-stage cropped dataset with the shorter side not exceeding 640 pixels.                   |
| stage3-800      | 570    | [Download](dataset-stage3-800.zip)       | 3-stage cropped dataset with the shorter side not exceeding 800 pixels.                   |
| stage3-p512-640 | 503    | [Download](dataset-stage3-p512-640.zip)  | 3-stage cropped dataset with the area not less than 512x512 pixels.                       |
| stage3-eyes-640 | 632    | [Download](dataset-stage3-eyes-640.zip)  | 3-stage cropped (with eye-focus) dataset with the shorter side not exceeding 640 pixels.  |
| stage3-eyes-800 | 632    | [Download](dataset-stage3-eyes-800.zip)  | 3-stage cropped (with eye-focus) dataset with the shorter side not exceeding 800 pixels.  |
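The packages above can also be fetched programmatically. A minimal sketch, assuming `huggingface_hub` is installed; the 512x704 variant and the target folder are only example choices:

```python
from huggingface_hub import hf_hub_download
import zipfile

# Fetch one of the packaged variants listed in the table (here: the 512x704 aligned set).
archive_path = hf_hub_download(
    repo_id="AppleHarem/vigna_arknights",
    filename="dataset-512x704.zip",
    repo_type="dataset",
)

# Unpack the images and their tag files into a local folder.
with zipfile.ZipFile(archive_path) as zf:
    zf.extractall("vigna_512x704")
```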
AppleHarem/vigna_arknights
[ "task_categories:text-to-image", "size_categories:n<1K", "license:mit", "art", "not-for-all-audiences", "region:us" ]
2023-12-16T18:22:51+00:00
{"license": "mit", "size_categories": ["n<1K"], "task_categories": ["text-to-image"], "tags": ["art", "not-for-all-audiences"]}
2023-12-16T18:23:17+00:00
[]
[]
TAGS #task_categories-text-to-image #size_categories-n<1K #license-mit #art #not-for-all-audiences #region-us
Dataset of vigna (Arknights) ============================ This is the dataset of vigna (Arknights), containing 210 images and their tags. Images are crawled from many sites (e.g. danbooru, pixiv, zerochan ...); the auto-crawling system is powered by the DeepGHS Team (huggingface organization). A WebUI that contains the crawlers and other tools is also available: (LittleAppleWebUI)
[]
[ "TAGS\n#task_categories-text-to-image #size_categories-n<1K #license-mit #art #not-for-all-audiences #region-us \n" ]
[ 44 ]
[ "passage: TAGS\n#task_categories-text-to-image #size_categories-n<1K #license-mit #art #not-for-all-audiences #region-us \n" ]
07359327b92960167ea4a803985e8f347a52bc18
I scraped [mangaupdates](https://www.mangaupdates.com) for a project and I am sharing the data.
There is a tar file which contains the JSON response from every infos entry. I parsed it and added it to a Postgres database. The pgdump was uploaded too.
There are some entries that do not exist anymore. They can be found in the removed ids JSON.

<details>
<summary>SQL structure</summary>

I didn't try to make it an optimal structure, but I tried to remove the redundancy of strings.

### Info
```sql
create table info
(
    id serial primary key,
    private_id int,
    public_id bigint not null,
    forum_id bigint not null,
    url_key text not null,
    url_name text,
    titles text[] not null,
    description text,
    image_name text,
    typ int not null,
    year int,
    latest_chapter integer not null,
    rating integer not null,
    bayesian_rating float,
    genres int[] not null,
    tags int[] not null,
    tags_upvotes int[] not null,
    tags_downvotes int[] not null,
    tags_uploader bigint[] not null,
    status text,
    licensed boolean not null,
    completed boolean not null,
    author int[] not null,
    artist int[] not null,
    publisher_original int[] not null,
    publisher_english int[] not null,
    publication text[] not null,
    publication_publisher int[] not null,
    relations text[] not null,
    anime_start text,
    anime_end text,
    last_updated_mu TIMESTAMP,
    last_updated TIMESTAMP not null,
    created TIMESTAMP not null
);
```

### Types
```sql
create table if not exists mtypes
(
    id serial primary key,
    name text not null
);
```

### Genres
```sql
create table if not exists genres
(
    id serial primary key,
    name text not null
);
```

### Tags
```sql
create table if not exists tags
(
    id serial primary key,
    name text not null
);
```

### People
```sql
create table if not exists ppl
(
    id serial primary key,
    mu_id bigint,
    name text not null
);
```

</details>
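To illustrate how the lookup tables relate to the id arrays on `info`, here is a minimal sketch of a query against the restored database; the connection parameters and database name are placeholders, and `psycopg2` is only one possible client:

```python
import psycopg2

# Placeholder connection parameters -- adjust to wherever the pgdump was restored.
conn = psycopg2.connect(dbname="mangaupdates", user="postgres",
                        password="secret", host="localhost")

with conn.cursor() as cur:
    # Resolve the integer genre ids stored on each info row back to genre names.
    cur.execute(
        """
        SELECT i.titles[1] AS title, array_agg(g.name) AS genres
        FROM info i
        JOIN genres g ON g.id = ANY(i.genres)
        GROUP BY i.id
        LIMIT 10;
        """
    )
    for title, genres in cur.fetchall():
        print(title, genres)

conn.close()
```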
Qqcf16426/mangaupdates
[ "size_categories:100K<n<1M", "language:en", "manga", "tags", "genres", "scraped", "region:us" ]
2023-12-16T18:37:41+00:00
{"language": ["en"], "size_categories": ["100K<n<1M"], "tags": ["manga", "tags", "genres", "scraped"]}
2023-12-16T19:10:09+00:00
[]
[ "en" ]
TAGS #size_categories-100K<n<1M #language-English #manga #tags #genres #scraped #region-us
I scraped mangaupdates for a project and I am sharing the data. There is a tar file which contains the JSON response from every infos entry. I parsed it and added it to a Postgres database. The pgdump was uploaded too. There are some entries that do not exist anymore. They can be found in the removed ids JSON. <details> <summary>SQL structure</summary> I didn't try to make it an optimal structure, but I tried to remove the redundancy of strings. ### Info ### Types ### Genres ### Tags ### People </details>
[ "### Info\n \n ### Types", "### Genres", "### Tags", "### People\n \n</details>" ]
[ "TAGS\n#size_categories-100K<n<1M #language-English #manga #tags #genres #scraped #region-us \n", "### Info\n \n ### Types", "### Genres", "### Tags", "### People\n \n</details>" ]
[ 34, 7, 4, 3, 8 ]
[ "passage: TAGS\n#size_categories-100K<n<1M #language-English #manga #tags #genres #scraped #region-us \n### Info\n \n ### Types### Genres### Tags### People\n \n</details>" ]
438605c2f11c1cffd2eb455ecadb11d0223721a4
# Dataset Card for Evaluation run of adamo1139/Mistral-7B-AEZAKMI-v1 <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [adamo1139/Mistral-7B-AEZAKMI-v1](https://huggingface.co/adamo1139/Mistral-7B-AEZAKMI-v1) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_adamo1139__Mistral-7B-AEZAKMI-v1", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-12-16T18:53:11.719846](https://huggingface.co/datasets/open-llm-leaderboard/details_adamo1139__Mistral-7B-AEZAKMI-v1/blob/main/results_2023-12-16T18-53-11.719846.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.5806632675458097, "acc_stderr": 0.033329691230435686, "acc_norm": 0.5911675175599549, "acc_norm_stderr": 0.03419768825329829, "mc1": 0.3733170134638923, "mc1_stderr": 0.016932370557570634, "mc2": 0.5354200723033775, "mc2_stderr": 0.015605336386464858 }, "harness|arc:challenge|25": { "acc": 0.552901023890785, "acc_stderr": 0.01452938016052684, "acc_norm": 0.5887372013651877, "acc_norm_stderr": 0.014379441068522085 }, "harness|hellaswag|10": { "acc": 0.6337382991435969, "acc_stderr": 0.004807975515446489, "acc_norm": 0.8200557657837084, "acc_norm_stderr": 0.0038335592281586684 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.29, "acc_stderr": 0.04560480215720684, "acc_norm": 0.29, "acc_norm_stderr": 0.04560480215720684 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.5481481481481482, "acc_stderr": 0.042992689054808644, "acc_norm": 0.5481481481481482, "acc_norm_stderr": 0.042992689054808644 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.6578947368421053, "acc_stderr": 0.0386073159931609, "acc_norm": 0.6578947368421053, "acc_norm_stderr": 0.0386073159931609 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.55, "acc_stderr": 0.05, "acc_norm": 0.55, "acc_norm_stderr": 0.05 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.6528301886792452, "acc_stderr": 0.029300101705549655, "acc_norm": 0.6528301886792452, "acc_norm_stderr": 0.029300101705549655 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.625, "acc_stderr": 0.04048439222695598, "acc_norm": 0.625, "acc_norm_stderr": 0.04048439222695598 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.5, "acc_stderr": 0.050251890762960605, "acc_norm": 0.5, "acc_norm_stderr": 0.050251890762960605 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.44, "acc_stderr": 0.04988876515698589, "acc_norm": 0.44, "acc_norm_stderr": 0.04988876515698589 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.38, "acc_stderr": 
0.04878317312145632, "acc_norm": 0.38, "acc_norm_stderr": 0.04878317312145632 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.6069364161849711, "acc_stderr": 0.03724249595817731, "acc_norm": 0.6069364161849711, "acc_norm_stderr": 0.03724249595817731 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.4215686274509804, "acc_stderr": 0.04913595201274498, "acc_norm": 0.4215686274509804, "acc_norm_stderr": 0.04913595201274498 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.71, "acc_stderr": 0.045604802157206845, "acc_norm": 0.71, "acc_norm_stderr": 0.045604802157206845 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.5191489361702127, "acc_stderr": 0.032662042990646796, "acc_norm": 0.5191489361702127, "acc_norm_stderr": 0.032662042990646796 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.38596491228070173, "acc_stderr": 0.045796394220704334, "acc_norm": 0.38596491228070173, "acc_norm_stderr": 0.045796394220704334 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5241379310344828, "acc_stderr": 0.0416180850350153, "acc_norm": 0.5241379310344828, "acc_norm_stderr": 0.0416180850350153 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.42857142857142855, "acc_stderr": 0.025487187147859372, "acc_norm": 0.42857142857142855, "acc_norm_stderr": 0.025487187147859372 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.42063492063492064, "acc_stderr": 0.04415438226743744, "acc_norm": 0.42063492063492064, "acc_norm_stderr": 0.04415438226743744 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.34, "acc_stderr": 0.04760952285695235, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695235 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.7032258064516129, "acc_stderr": 0.02598850079241189, "acc_norm": 0.7032258064516129, "acc_norm_stderr": 0.02598850079241189 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.43349753694581283, "acc_stderr": 0.034867317274198714, "acc_norm": 0.43349753694581283, "acc_norm_stderr": 0.034867317274198714 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.61, "acc_stderr": 0.04902071300001974, "acc_norm": 0.61, "acc_norm_stderr": 0.04902071300001974 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.7333333333333333, "acc_stderr": 0.03453131801885417, "acc_norm": 0.7333333333333333, "acc_norm_stderr": 0.03453131801885417 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.7878787878787878, "acc_stderr": 0.029126522834586825, "acc_norm": 0.7878787878787878, "acc_norm_stderr": 0.029126522834586825 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.8082901554404145, "acc_stderr": 0.028408953626245282, "acc_norm": 0.8082901554404145, "acc_norm_stderr": 0.028408953626245282 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.5769230769230769, "acc_stderr": 0.02504919787604234, "acc_norm": 0.5769230769230769, "acc_norm_stderr": 0.02504919787604234 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.2962962962962963, "acc_stderr": 0.027840811495871934, "acc_norm": 0.2962962962962963, "acc_norm_stderr": 0.027840811495871934 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.5966386554621849, "acc_stderr": 0.031866081214088314, "acc_norm": 0.5966386554621849, "acc_norm_stderr": 0.031866081214088314 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.33774834437086093, "acc_stderr": 0.038615575462551684, "acc_norm": 0.33774834437086093, "acc_norm_stderr": 
0.038615575462551684 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.7504587155963303, "acc_stderr": 0.018553897629501624, "acc_norm": 0.7504587155963303, "acc_norm_stderr": 0.018553897629501624 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.4537037037037037, "acc_stderr": 0.03395322726375797, "acc_norm": 0.4537037037037037, "acc_norm_stderr": 0.03395322726375797 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.7058823529411765, "acc_stderr": 0.03198001660115071, "acc_norm": 0.7058823529411765, "acc_norm_stderr": 0.03198001660115071 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.7215189873417721, "acc_stderr": 0.029178682304842548, "acc_norm": 0.7215189873417721, "acc_norm_stderr": 0.029178682304842548 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.6233183856502242, "acc_stderr": 0.03252113489929187, "acc_norm": 0.6233183856502242, "acc_norm_stderr": 0.03252113489929187 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.7022900763358778, "acc_stderr": 0.040103589424622034, "acc_norm": 0.7022900763358778, "acc_norm_stderr": 0.040103589424622034 }, "harness|hendrycksTest-international_law|5": { "acc": 0.71900826446281, "acc_stderr": 0.04103203830514512, "acc_norm": 0.71900826446281, "acc_norm_stderr": 0.04103203830514512 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.6851851851851852, "acc_stderr": 0.04489931073591312, "acc_norm": 0.6851851851851852, "acc_norm_stderr": 0.04489931073591312 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.7423312883435583, "acc_stderr": 0.03436150827846917, "acc_norm": 0.7423312883435583, "acc_norm_stderr": 0.03436150827846917 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.42857142857142855, "acc_stderr": 0.04697113923010212, "acc_norm": 0.42857142857142855, "acc_norm_stderr": 0.04697113923010212 }, "harness|hendrycksTest-management|5": { "acc": 0.7572815533980582, "acc_stderr": 0.04245022486384493, "acc_norm": 0.7572815533980582, "acc_norm_stderr": 0.04245022486384493 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8418803418803419, "acc_stderr": 0.023902325549560403, "acc_norm": 0.8418803418803419, "acc_norm_stderr": 0.023902325549560403 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.63, "acc_stderr": 0.04852365870939099, "acc_norm": 0.63, "acc_norm_stderr": 0.04852365870939099 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.7662835249042146, "acc_stderr": 0.015133383278988836, "acc_norm": 0.7662835249042146, "acc_norm_stderr": 0.015133383278988836 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.6445086705202312, "acc_stderr": 0.025770292082977243, "acc_norm": 0.6445086705202312, "acc_norm_stderr": 0.025770292082977243 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.2659217877094972, "acc_stderr": 0.014776765066438895, "acc_norm": 0.2659217877094972, "acc_norm_stderr": 0.014776765066438895 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.6797385620915033, "acc_stderr": 0.02671611838015685, "acc_norm": 0.6797385620915033, "acc_norm_stderr": 0.02671611838015685 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.6430868167202572, "acc_stderr": 0.027210420375934023, "acc_norm": 0.6430868167202572, "acc_norm_stderr": 0.027210420375934023 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.6358024691358025, "acc_stderr": 0.026774929899722327, "acc_norm": 0.6358024691358025, "acc_norm_stderr": 0.026774929899722327 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.41843971631205673, "acc_stderr": 
0.02942799403941999, "acc_norm": 0.41843971631205673, "acc_norm_stderr": 0.02942799403941999 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.4002607561929596, "acc_stderr": 0.012513582529136213, "acc_norm": 0.4002607561929596, "acc_norm_stderr": 0.012513582529136213 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.5882352941176471, "acc_stderr": 0.029896163033125474, "acc_norm": 0.5882352941176471, "acc_norm_stderr": 0.029896163033125474 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.5915032679738562, "acc_stderr": 0.01988622103750187, "acc_norm": 0.5915032679738562, "acc_norm_stderr": 0.01988622103750187 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6363636363636364, "acc_stderr": 0.04607582090719976, "acc_norm": 0.6363636363636364, "acc_norm_stderr": 0.04607582090719976 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.636734693877551, "acc_stderr": 0.030789051139030806, "acc_norm": 0.636734693877551, "acc_norm_stderr": 0.030789051139030806 }, "harness|hendrycksTest-sociology|5": { "acc": 0.7761194029850746, "acc_stderr": 0.029475250236017204, "acc_norm": 0.7761194029850746, "acc_norm_stderr": 0.029475250236017204 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.84, "acc_stderr": 0.036845294917747094, "acc_norm": 0.84, "acc_norm_stderr": 0.036845294917747094 }, "harness|hendrycksTest-virology|5": { "acc": 0.4939759036144578, "acc_stderr": 0.03892212195333045, "acc_norm": 0.4939759036144578, "acc_norm_stderr": 0.03892212195333045 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8070175438596491, "acc_stderr": 0.030267457554898458, "acc_norm": 0.8070175438596491, "acc_norm_stderr": 0.030267457554898458 }, "harness|truthfulqa:mc|0": { "mc1": 0.3733170134638923, "mc1_stderr": 0.016932370557570634, "mc2": 0.5354200723033775, "mc2_stderr": 0.015605336386464858 }, "harness|winogrande|5": { "acc": 0.7569060773480663, "acc_stderr": 0.012055665630431051 }, "harness|gsm8k|5": { "acc": 0.006823351023502654, "acc_stderr": 0.0022675371022545018 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). 
--> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
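The loading example near the top of this card pulls the detail split for one task only. As a complement, here is a minimal sketch, not part of the auto-generated card itself, of how one might list every configuration in this repository and read the aggregated metrics stored in the "results" configuration. It assumes the `datasets` library is installed and the Hugging Face Hub is reachable; the exact column layout of the aggregated-results table is not documented here, so the final lines only inspect the schema.

```python
from datasets import get_dataset_config_names, load_dataset

REPO = "open-llm-leaderboard/details_adamo1139__Mistral-7B-AEZAKMI-v1"

# One configuration per evaluated task, plus the aggregated "results" configuration.
configs = get_dataset_config_names(REPO)
print(f"{len(configs)} configurations available")

# The "latest" split always points at the most recent run
# (2023-12-16T18:53:11.719846 for this card); the timestamped split
# "2023_12_16T18_53_11.719846" addresses that run explicitly.
aggregated = load_dataset(REPO, "results", split="latest")

# Inspect the schema before relying on any particular column name,
# since this card does not document the aggregated-results layout.
print(aggregated.column_names)
print(aggregated[0])
```

A single task's details load the same way by swapping in the matching configuration name, for example `harness_gsm8k_5` or `harness_hendrycksTest_world_religions_5`, both of which appear in this dataset's configuration list.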
open-llm-leaderboard/details_adamo1139__Mistral-7B-AEZAKMI-v1
[ "region:us" ]
2023-12-16T18:56:03+00:00
{"pretty_name": "Evaluation run of adamo1139/Mistral-7B-AEZAKMI-v1", "dataset_summary": "Dataset automatically created during the evaluation run of model [adamo1139/Mistral-7B-AEZAKMI-v1](https://huggingface.co/adamo1139/Mistral-7B-AEZAKMI-v1) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_adamo1139__Mistral-7B-AEZAKMI-v1\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-12-16T18:53:11.719846](https://huggingface.co/datasets/open-llm-leaderboard/details_adamo1139__Mistral-7B-AEZAKMI-v1/blob/main/results_2023-12-16T18-53-11.719846.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.5806632675458097,\n \"acc_stderr\": 0.033329691230435686,\n \"acc_norm\": 0.5911675175599549,\n \"acc_norm_stderr\": 0.03419768825329829,\n \"mc1\": 0.3733170134638923,\n \"mc1_stderr\": 0.016932370557570634,\n \"mc2\": 0.5354200723033775,\n \"mc2_stderr\": 0.015605336386464858\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.552901023890785,\n \"acc_stderr\": 0.01452938016052684,\n \"acc_norm\": 0.5887372013651877,\n \"acc_norm_stderr\": 0.014379441068522085\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6337382991435969,\n \"acc_stderr\": 0.004807975515446489,\n \"acc_norm\": 0.8200557657837084,\n \"acc_norm_stderr\": 0.0038335592281586684\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.29,\n \"acc_stderr\": 0.04560480215720684,\n \"acc_norm\": 0.29,\n \"acc_norm_stderr\": 0.04560480215720684\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.5481481481481482,\n \"acc_stderr\": 0.042992689054808644,\n \"acc_norm\": 0.5481481481481482,\n \"acc_norm_stderr\": 0.042992689054808644\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.6578947368421053,\n \"acc_stderr\": 0.0386073159931609,\n \"acc_norm\": 0.6578947368421053,\n \"acc_norm_stderr\": 0.0386073159931609\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.55,\n \"acc_stderr\": 0.05,\n \"acc_norm\": 0.55,\n \"acc_norm_stderr\": 0.05\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.6528301886792452,\n \"acc_stderr\": 0.029300101705549655,\n \"acc_norm\": 0.6528301886792452,\n \"acc_norm_stderr\": 0.029300101705549655\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.625,\n \"acc_stderr\": 0.04048439222695598,\n \"acc_norm\": 0.625,\n \"acc_norm_stderr\": 0.04048439222695598\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.5,\n \"acc_stderr\": 0.050251890762960605,\n \"acc_norm\": 0.5,\n \"acc_norm_stderr\": 
0.050251890762960605\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.44,\n \"acc_stderr\": 0.04988876515698589,\n \"acc_norm\": 0.44,\n \"acc_norm_stderr\": 0.04988876515698589\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.38,\n \"acc_stderr\": 0.04878317312145632,\n \"acc_norm\": 0.38,\n \"acc_norm_stderr\": 0.04878317312145632\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6069364161849711,\n \"acc_stderr\": 0.03724249595817731,\n \"acc_norm\": 0.6069364161849711,\n \"acc_norm_stderr\": 0.03724249595817731\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.4215686274509804,\n \"acc_stderr\": 0.04913595201274498,\n \"acc_norm\": 0.4215686274509804,\n \"acc_norm_stderr\": 0.04913595201274498\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.71,\n \"acc_stderr\": 0.045604802157206845,\n \"acc_norm\": 0.71,\n \"acc_norm_stderr\": 0.045604802157206845\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.5191489361702127,\n \"acc_stderr\": 0.032662042990646796,\n \"acc_norm\": 0.5191489361702127,\n \"acc_norm_stderr\": 0.032662042990646796\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.38596491228070173,\n \"acc_stderr\": 0.045796394220704334,\n \"acc_norm\": 0.38596491228070173,\n \"acc_norm_stderr\": 0.045796394220704334\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5241379310344828,\n \"acc_stderr\": 0.0416180850350153,\n \"acc_norm\": 0.5241379310344828,\n \"acc_norm_stderr\": 0.0416180850350153\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.42857142857142855,\n \"acc_stderr\": 0.025487187147859372,\n \"acc_norm\": 0.42857142857142855,\n \"acc_norm_stderr\": 0.025487187147859372\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.42063492063492064,\n \"acc_stderr\": 0.04415438226743744,\n \"acc_norm\": 0.42063492063492064,\n \"acc_norm_stderr\": 0.04415438226743744\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.34,\n \"acc_stderr\": 0.04760952285695235,\n \"acc_norm\": 0.34,\n \"acc_norm_stderr\": 0.04760952285695235\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7032258064516129,\n \"acc_stderr\": 0.02598850079241189,\n \"acc_norm\": 0.7032258064516129,\n \"acc_norm_stderr\": 0.02598850079241189\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.43349753694581283,\n \"acc_stderr\": 0.034867317274198714,\n \"acc_norm\": 0.43349753694581283,\n \"acc_norm_stderr\": 0.034867317274198714\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.61,\n \"acc_stderr\": 0.04902071300001974,\n \"acc_norm\": 0.61,\n \"acc_norm_stderr\": 0.04902071300001974\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7333333333333333,\n \"acc_stderr\": 0.03453131801885417,\n \"acc_norm\": 0.7333333333333333,\n \"acc_norm_stderr\": 0.03453131801885417\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.7878787878787878,\n \"acc_stderr\": 0.029126522834586825,\n \"acc_norm\": 0.7878787878787878,\n \"acc_norm_stderr\": 0.029126522834586825\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.8082901554404145,\n \"acc_stderr\": 0.028408953626245282,\n \"acc_norm\": 0.8082901554404145,\n \"acc_norm_stderr\": 0.028408953626245282\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.5769230769230769,\n \"acc_stderr\": 
0.02504919787604234,\n \"acc_norm\": 0.5769230769230769,\n \"acc_norm_stderr\": 0.02504919787604234\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.2962962962962963,\n \"acc_stderr\": 0.027840811495871934,\n \"acc_norm\": 0.2962962962962963,\n \"acc_norm_stderr\": 0.027840811495871934\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.5966386554621849,\n \"acc_stderr\": 0.031866081214088314,\n \"acc_norm\": 0.5966386554621849,\n \"acc_norm_stderr\": 0.031866081214088314\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.33774834437086093,\n \"acc_stderr\": 0.038615575462551684,\n \"acc_norm\": 0.33774834437086093,\n \"acc_norm_stderr\": 0.038615575462551684\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.7504587155963303,\n \"acc_stderr\": 0.018553897629501624,\n \"acc_norm\": 0.7504587155963303,\n \"acc_norm_stderr\": 0.018553897629501624\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.4537037037037037,\n \"acc_stderr\": 0.03395322726375797,\n \"acc_norm\": 0.4537037037037037,\n \"acc_norm_stderr\": 0.03395322726375797\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.7058823529411765,\n \"acc_stderr\": 0.03198001660115071,\n \"acc_norm\": 0.7058823529411765,\n \"acc_norm_stderr\": 0.03198001660115071\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.7215189873417721,\n \"acc_stderr\": 0.029178682304842548,\n \"acc_norm\": 0.7215189873417721,\n \"acc_norm_stderr\": 0.029178682304842548\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6233183856502242,\n \"acc_stderr\": 0.03252113489929187,\n \"acc_norm\": 0.6233183856502242,\n \"acc_norm_stderr\": 0.03252113489929187\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.7022900763358778,\n \"acc_stderr\": 0.040103589424622034,\n \"acc_norm\": 0.7022900763358778,\n \"acc_norm_stderr\": 0.040103589424622034\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.71900826446281,\n \"acc_stderr\": 0.04103203830514512,\n \"acc_norm\": 0.71900826446281,\n \"acc_norm_stderr\": 0.04103203830514512\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.6851851851851852,\n \"acc_stderr\": 0.04489931073591312,\n \"acc_norm\": 0.6851851851851852,\n \"acc_norm_stderr\": 0.04489931073591312\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7423312883435583,\n \"acc_stderr\": 0.03436150827846917,\n \"acc_norm\": 0.7423312883435583,\n \"acc_norm_stderr\": 0.03436150827846917\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.42857142857142855,\n \"acc_stderr\": 0.04697113923010212,\n \"acc_norm\": 0.42857142857142855,\n \"acc_norm_stderr\": 0.04697113923010212\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7572815533980582,\n \"acc_stderr\": 0.04245022486384493,\n \"acc_norm\": 0.7572815533980582,\n \"acc_norm_stderr\": 0.04245022486384493\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8418803418803419,\n \"acc_stderr\": 0.023902325549560403,\n \"acc_norm\": 0.8418803418803419,\n \"acc_norm_stderr\": 0.023902325549560403\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.63,\n \"acc_stderr\": 0.04852365870939099,\n \"acc_norm\": 0.63,\n \"acc_norm_stderr\": 0.04852365870939099\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.7662835249042146,\n \"acc_stderr\": 0.015133383278988836,\n \"acc_norm\": 0.7662835249042146,\n 
\"acc_norm_stderr\": 0.015133383278988836\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.6445086705202312,\n \"acc_stderr\": 0.025770292082977243,\n \"acc_norm\": 0.6445086705202312,\n \"acc_norm_stderr\": 0.025770292082977243\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.2659217877094972,\n \"acc_stderr\": 0.014776765066438895,\n \"acc_norm\": 0.2659217877094972,\n \"acc_norm_stderr\": 0.014776765066438895\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.6797385620915033,\n \"acc_stderr\": 0.02671611838015685,\n \"acc_norm\": 0.6797385620915033,\n \"acc_norm_stderr\": 0.02671611838015685\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.6430868167202572,\n \"acc_stderr\": 0.027210420375934023,\n \"acc_norm\": 0.6430868167202572,\n \"acc_norm_stderr\": 0.027210420375934023\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.6358024691358025,\n \"acc_stderr\": 0.026774929899722327,\n \"acc_norm\": 0.6358024691358025,\n \"acc_norm_stderr\": 0.026774929899722327\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.41843971631205673,\n \"acc_stderr\": 0.02942799403941999,\n \"acc_norm\": 0.41843971631205673,\n \"acc_norm_stderr\": 0.02942799403941999\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.4002607561929596,\n \"acc_stderr\": 0.012513582529136213,\n \"acc_norm\": 0.4002607561929596,\n \"acc_norm_stderr\": 0.012513582529136213\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.5882352941176471,\n \"acc_stderr\": 0.029896163033125474,\n \"acc_norm\": 0.5882352941176471,\n \"acc_norm_stderr\": 0.029896163033125474\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.5915032679738562,\n \"acc_stderr\": 0.01988622103750187,\n \"acc_norm\": 0.5915032679738562,\n \"acc_norm_stderr\": 0.01988622103750187\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6363636363636364,\n \"acc_stderr\": 0.04607582090719976,\n \"acc_norm\": 0.6363636363636364,\n \"acc_norm_stderr\": 0.04607582090719976\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.636734693877551,\n \"acc_stderr\": 0.030789051139030806,\n \"acc_norm\": 0.636734693877551,\n \"acc_norm_stderr\": 0.030789051139030806\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.7761194029850746,\n \"acc_stderr\": 0.029475250236017204,\n \"acc_norm\": 0.7761194029850746,\n \"acc_norm_stderr\": 0.029475250236017204\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.84,\n \"acc_stderr\": 0.036845294917747094,\n \"acc_norm\": 0.84,\n \"acc_norm_stderr\": 0.036845294917747094\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.4939759036144578,\n \"acc_stderr\": 0.03892212195333045,\n \"acc_norm\": 0.4939759036144578,\n \"acc_norm_stderr\": 0.03892212195333045\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8070175438596491,\n \"acc_stderr\": 0.030267457554898458,\n \"acc_norm\": 0.8070175438596491,\n \"acc_norm_stderr\": 0.030267457554898458\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.3733170134638923,\n \"mc1_stderr\": 0.016932370557570634,\n \"mc2\": 0.5354200723033775,\n \"mc2_stderr\": 0.015605336386464858\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7569060773480663,\n \"acc_stderr\": 0.012055665630431051\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.006823351023502654,\n \"acc_stderr\": 0.0022675371022545018\n }\n}\n```", "repo_url": "https://huggingface.co/adamo1139/Mistral-7B-AEZAKMI-v1", 
"leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_12_16T18_53_11.719846", "path": ["**/details_harness|arc:challenge|25_2023-12-16T18-53-11.719846.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-12-16T18-53-11.719846.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_12_16T18_53_11.719846", "path": ["**/details_harness|gsm8k|5_2023-12-16T18-53-11.719846.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-12-16T18-53-11.719846.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_12_16T18_53_11.719846", "path": ["**/details_harness|hellaswag|10_2023-12-16T18-53-11.719846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-12-16T18-53-11.719846.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_12_16T18_53_11.719846", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T18-53-11.719846.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-12-16T18-53-11.719846.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-12-16T18-53-11.719846.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T18-53-11.719846.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T18-53-11.719846.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-12-16T18-53-11.719846.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T18-53-11.719846.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T18-53-11.719846.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T18-53-11.719846.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T18-53-11.719846.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-12-16T18-53-11.719846.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-12-16T18-53-11.719846.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T18-53-11.719846.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-12-16T18-53-11.719846.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T18-53-11.719846.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T18-53-11.719846.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T18-53-11.719846.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-12-16T18-53-11.719846.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T18-53-11.719846.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T18-53-11.719846.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T18-53-11.719846.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T18-53-11.719846.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T18-53-11.719846.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T18-53-11.719846.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T18-53-11.719846.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T18-53-11.719846.parquet", 
"**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T18-53-11.719846.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T18-53-11.719846.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T18-53-11.719846.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T18-53-11.719846.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T18-53-11.719846.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T18-53-11.719846.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-12-16T18-53-11.719846.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T18-53-11.719846.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-12-16T18-53-11.719846.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T18-53-11.719846.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T18-53-11.719846.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T18-53-11.719846.parquet", "**/details_harness|hendrycksTest-management|5_2023-12-16T18-53-11.719846.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-12-16T18-53-11.719846.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T18-53-11.719846.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T18-53-11.719846.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T18-53-11.719846.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T18-53-11.719846.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-12-16T18-53-11.719846.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-12-16T18-53-11.719846.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-12-16T18-53-11.719846.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T18-53-11.719846.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-12-16T18-53-11.719846.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T18-53-11.719846.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T18-53-11.719846.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-12-16T18-53-11.719846.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-12-16T18-53-11.719846.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-12-16T18-53-11.719846.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T18-53-11.719846.parquet", "**/details_harness|hendrycksTest-virology|5_2023-12-16T18-53-11.719846.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-12-16T18-53-11.719846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T18-53-11.719846.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-12-16T18-53-11.719846.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-12-16T18-53-11.719846.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T18-53-11.719846.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T18-53-11.719846.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-12-16T18-53-11.719846.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T18-53-11.719846.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T18-53-11.719846.parquet", 
"**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T18-53-11.719846.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T18-53-11.719846.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-12-16T18-53-11.719846.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-12-16T18-53-11.719846.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T18-53-11.719846.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-12-16T18-53-11.719846.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T18-53-11.719846.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T18-53-11.719846.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T18-53-11.719846.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-12-16T18-53-11.719846.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T18-53-11.719846.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T18-53-11.719846.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T18-53-11.719846.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T18-53-11.719846.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T18-53-11.719846.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T18-53-11.719846.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T18-53-11.719846.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T18-53-11.719846.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T18-53-11.719846.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T18-53-11.719846.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T18-53-11.719846.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T18-53-11.719846.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T18-53-11.719846.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T18-53-11.719846.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-12-16T18-53-11.719846.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T18-53-11.719846.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-12-16T18-53-11.719846.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T18-53-11.719846.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T18-53-11.719846.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T18-53-11.719846.parquet", "**/details_harness|hendrycksTest-management|5_2023-12-16T18-53-11.719846.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-12-16T18-53-11.719846.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T18-53-11.719846.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T18-53-11.719846.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T18-53-11.719846.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T18-53-11.719846.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-12-16T18-53-11.719846.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-12-16T18-53-11.719846.parquet", 
"**/details_harness|hendrycksTest-prehistory|5_2023-12-16T18-53-11.719846.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T18-53-11.719846.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-12-16T18-53-11.719846.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T18-53-11.719846.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T18-53-11.719846.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-12-16T18-53-11.719846.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-12-16T18-53-11.719846.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-12-16T18-53-11.719846.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T18-53-11.719846.parquet", "**/details_harness|hendrycksTest-virology|5_2023-12-16T18-53-11.719846.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-12-16T18-53-11.719846.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_12_16T18_53_11.719846", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T18-53-11.719846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T18-53-11.719846.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_12_16T18_53_11.719846", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-12-16T18-53-11.719846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-12-16T18-53-11.719846.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_12_16T18_53_11.719846", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-12-16T18-53-11.719846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-12-16T18-53-11.719846.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_12_16T18_53_11.719846", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T18-53-11.719846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T18-53-11.719846.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_12_16T18_53_11.719846", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T18-53-11.719846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T18-53-11.719846.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_12_16T18_53_11.719846", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-12-16T18-53-11.719846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-12-16T18-53-11.719846.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_12_16T18_53_11.719846", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T18-53-11.719846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T18-53-11.719846.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_12_16T18_53_11.719846", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T18-53-11.719846.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T18-53-11.719846.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_12_16T18_53_11.719846", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T18-53-11.719846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T18-53-11.719846.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_12_16T18_53_11.719846", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T18-53-11.719846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T18-53-11.719846.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_12_16T18_53_11.719846", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-12-16T18-53-11.719846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-12-16T18-53-11.719846.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_12_16T18_53_11.719846", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-12-16T18-53-11.719846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-12-16T18-53-11.719846.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_12_16T18_53_11.719846", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T18-53-11.719846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T18-53-11.719846.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_12_16T18_53_11.719846", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-12-16T18-53-11.719846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-12-16T18-53-11.719846.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_12_16T18_53_11.719846", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T18-53-11.719846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T18-53-11.719846.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_12_16T18_53_11.719846", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T18-53-11.719846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T18-53-11.719846.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_12_16T18_53_11.719846", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T18-53-11.719846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T18-53-11.719846.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_12_16T18_53_11.719846", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-12-16T18-53-11.719846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-12-16T18-53-11.719846.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_12_16T18_53_11.719846", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T18-53-11.719846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T18-53-11.719846.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_12_16T18_53_11.719846", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T18-53-11.719846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T18-53-11.719846.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_12_16T18_53_11.719846", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T18-53-11.719846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T18-53-11.719846.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_12_16T18_53_11.719846", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T18-53-11.719846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T18-53-11.719846.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_12_16T18_53_11.719846", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T18-53-11.719846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T18-53-11.719846.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_12_16T18_53_11.719846", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T18-53-11.719846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T18-53-11.719846.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_12_16T18_53_11.719846", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T18-53-11.719846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T18-53-11.719846.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_12_16T18_53_11.719846", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T18-53-11.719846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T18-53-11.719846.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_12_16T18_53_11.719846", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T18-53-11.719846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T18-53-11.719846.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_12_16T18_53_11.719846", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T18-53-11.719846.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T18-53-11.719846.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_12_16T18_53_11.719846", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T18-53-11.719846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T18-53-11.719846.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_12_16T18_53_11.719846", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T18-53-11.719846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T18-53-11.719846.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_12_16T18_53_11.719846", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T18-53-11.719846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T18-53-11.719846.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_12_16T18_53_11.719846", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T18-53-11.719846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T18-53-11.719846.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_12_16T18_53_11.719846", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-12-16T18-53-11.719846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-12-16T18-53-11.719846.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_12_16T18_53_11.719846", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T18-53-11.719846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T18-53-11.719846.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_12_16T18_53_11.719846", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-12-16T18-53-11.719846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-12-16T18-53-11.719846.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_12_16T18_53_11.719846", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T18-53-11.719846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T18-53-11.719846.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_12_16T18_53_11.719846", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T18-53-11.719846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T18-53-11.719846.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_12_16T18_53_11.719846", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T18-53-11.719846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T18-53-11.719846.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_12_16T18_53_11.719846", "path": ["**/details_harness|hendrycksTest-management|5_2023-12-16T18-53-11.719846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-12-16T18-53-11.719846.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_12_16T18_53_11.719846", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-12-16T18-53-11.719846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-12-16T18-53-11.719846.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_12_16T18_53_11.719846", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T18-53-11.719846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T18-53-11.719846.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_12_16T18_53_11.719846", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T18-53-11.719846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T18-53-11.719846.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_12_16T18_53_11.719846", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T18-53-11.719846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T18-53-11.719846.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_12_16T18_53_11.719846", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T18-53-11.719846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T18-53-11.719846.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_12_16T18_53_11.719846", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-12-16T18-53-11.719846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-12-16T18-53-11.719846.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_12_16T18_53_11.719846", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-12-16T18-53-11.719846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-12-16T18-53-11.719846.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_12_16T18_53_11.719846", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-12-16T18-53-11.719846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-12-16T18-53-11.719846.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_12_16T18_53_11.719846", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T18-53-11.719846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T18-53-11.719846.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_12_16T18_53_11.719846", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-12-16T18-53-11.719846.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2023-12-16T18-53-11.719846.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_12_16T18_53_11.719846", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T18-53-11.719846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T18-53-11.719846.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_12_16T18_53_11.719846", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T18-53-11.719846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T18-53-11.719846.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_12_16T18_53_11.719846", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-12-16T18-53-11.719846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-12-16T18-53-11.719846.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_12_16T18_53_11.719846", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-12-16T18-53-11.719846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-12-16T18-53-11.719846.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_12_16T18_53_11.719846", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-12-16T18-53-11.719846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-12-16T18-53-11.719846.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_12_16T18_53_11.719846", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T18-53-11.719846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T18-53-11.719846.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_12_16T18_53_11.719846", "path": ["**/details_harness|hendrycksTest-virology|5_2023-12-16T18-53-11.719846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-12-16T18-53-11.719846.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_12_16T18_53_11.719846", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-12-16T18-53-11.719846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-12-16T18-53-11.719846.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_12_16T18_53_11.719846", "path": ["**/details_harness|truthfulqa:mc|0_2023-12-16T18-53-11.719846.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-12-16T18-53-11.719846.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_12_16T18_53_11.719846", "path": ["**/details_harness|winogrande|5_2023-12-16T18-53-11.719846.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-12-16T18-53-11.719846.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_12_16T18_53_11.719846", "path": ["results_2023-12-16T18-53-11.719846.parquet"]}, {"split": "latest", "path": 
["results_2023-12-16T18-53-11.719846.parquet"]}]}]}
2023-12-16T18:56:42+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of adamo1139/Mistral-7B-AEZAKMI-v1 Dataset automatically created during the evaluation run of model adamo1139/Mistral-7B-AEZAKMI-v1 on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-12-16T18:53:11.719846 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
[ "# Dataset Card for Evaluation run of adamo1139/Mistral-7B-AEZAKMI-v1\n\n\n\nDataset automatically created during the evaluation run of model adamo1139/Mistral-7B-AEZAKMI-v1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-12-16T18:53:11.719846(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of adamo1139/Mistral-7B-AEZAKMI-v1\n\n\n\nDataset automatically created during the evaluation run of model adamo1139/Mistral-7B-AEZAKMI-v1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-12-16T18:53:11.719846(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 191, 67, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of adamo1139/Mistral-7B-AEZAKMI-v1\n\n\n\nDataset automatically created during the evaluation run of model adamo1139/Mistral-7B-AEZAKMI-v1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-12-16T18:53:11.719846(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]## Dataset Card Authors [optional]" ]
904d8106d3f316719542daf2d3bbeaa6ac983d80
# Dataset Card for Evaluation run of janai-hq/trinity-v1 <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [janai-hq/trinity-v1](https://huggingface.co/janai-hq/trinity-v1) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_janai-hq__trinity-v1", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-12-16T19:03:31.389271](https://huggingface.co/datasets/open-llm-leaderboard/details_janai-hq__trinity-v1/blob/main/results_2023-12-16T19-03-31.389271.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.6575877329335247, "acc_stderr": 0.031985421208388404, "acc_norm": 0.6571647268300141, "acc_norm_stderr": 0.032648337921958155, "mc1": 0.5507955936352509, "mc1_stderr": 0.01741294198611529, "mc2": 0.6931209356367747, "mc2_stderr": 0.015031530031665238 }, "harness|arc:challenge|25": { "acc": 0.6988054607508533, "acc_stderr": 0.013406741767847632, "acc_norm": 0.7226962457337884, "acc_norm_stderr": 0.013082095839059376 }, "harness|hellaswag|10": { "acc": 0.711113324039036, "acc_stderr": 0.004523188431142894, "acc_norm": 0.8835889265086636, "acc_norm_stderr": 0.0032006176493464752 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.33, "acc_stderr": 0.04725815626252605, "acc_norm": 0.33, "acc_norm_stderr": 0.04725815626252605 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.6592592592592592, "acc_stderr": 0.040943762699967926, "acc_norm": 0.6592592592592592, "acc_norm_stderr": 0.040943762699967926 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.6907894736842105, "acc_stderr": 0.037610708698674805, "acc_norm": 0.6907894736842105, "acc_norm_stderr": 0.037610708698674805 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.62, "acc_stderr": 0.048783173121456316, "acc_norm": 0.62, "acc_norm_stderr": 0.048783173121456316 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.720754716981132, "acc_stderr": 0.027611163402399715, "acc_norm": 0.720754716981132, "acc_norm_stderr": 0.027611163402399715 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.7708333333333334, "acc_stderr": 0.03514697467862388, "acc_norm": 0.7708333333333334, "acc_norm_stderr": 0.03514697467862388 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.47, "acc_stderr": 0.050161355804659205, "acc_norm": 0.47, "acc_norm_stderr": 0.050161355804659205 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.56, "acc_stderr": 0.049888765156985884, "acc_norm": 0.56, "acc_norm_stderr": 0.049888765156985884 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.31, 
"acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.6994219653179191, "acc_stderr": 0.03496101481191179, "acc_norm": 0.6994219653179191, "acc_norm_stderr": 0.03496101481191179 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.46078431372549017, "acc_stderr": 0.04959859966384181, "acc_norm": 0.46078431372549017, "acc_norm_stderr": 0.04959859966384181 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.77, "acc_stderr": 0.042295258468165065, "acc_norm": 0.77, "acc_norm_stderr": 0.042295258468165065 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.5787234042553191, "acc_stderr": 0.03227834510146268, "acc_norm": 0.5787234042553191, "acc_norm_stderr": 0.03227834510146268 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.5087719298245614, "acc_stderr": 0.04702880432049615, "acc_norm": 0.5087719298245614, "acc_norm_stderr": 0.04702880432049615 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5655172413793104, "acc_stderr": 0.04130740879555498, "acc_norm": 0.5655172413793104, "acc_norm_stderr": 0.04130740879555498 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.42328042328042326, "acc_stderr": 0.02544636563440678, "acc_norm": 0.42328042328042326, "acc_norm_stderr": 0.02544636563440678 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.46825396825396826, "acc_stderr": 0.04463112720677172, "acc_norm": 0.46825396825396826, "acc_norm_stderr": 0.04463112720677172 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.33, "acc_stderr": 0.04725815626252604, "acc_norm": 0.33, "acc_norm_stderr": 0.04725815626252604 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.7806451612903226, "acc_stderr": 0.023540799358723295, "acc_norm": 0.7806451612903226, "acc_norm_stderr": 0.023540799358723295 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.5320197044334976, "acc_stderr": 0.03510766597959215, "acc_norm": 0.5320197044334976, "acc_norm_stderr": 0.03510766597959215 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.7, "acc_stderr": 0.046056618647183814, "acc_norm": 0.7, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.7757575757575758, "acc_stderr": 0.03256866661681102, "acc_norm": 0.7757575757575758, "acc_norm_stderr": 0.03256866661681102 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.7878787878787878, "acc_stderr": 0.029126522834586815, "acc_norm": 0.7878787878787878, "acc_norm_stderr": 0.029126522834586815 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.9015544041450777, "acc_stderr": 0.02150024957603348, "acc_norm": 0.9015544041450777, "acc_norm_stderr": 0.02150024957603348 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.6666666666666666, "acc_stderr": 0.023901157979402538, "acc_norm": 0.6666666666666666, "acc_norm_stderr": 0.023901157979402538 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.34074074074074073, "acc_stderr": 0.028897748741131154, "acc_norm": 0.34074074074074073, "acc_norm_stderr": 0.028897748741131154 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.6722689075630253, "acc_stderr": 0.03048991141767323, "acc_norm": 0.6722689075630253, "acc_norm_stderr": 0.03048991141767323 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.3509933774834437, "acc_stderr": 0.03896981964257375, "acc_norm": 0.3509933774834437, "acc_norm_stderr": 
0.03896981964257375 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8532110091743119, "acc_stderr": 0.01517314184512625, "acc_norm": 0.8532110091743119, "acc_norm_stderr": 0.01517314184512625 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.5416666666666666, "acc_stderr": 0.03398110890294636, "acc_norm": 0.5416666666666666, "acc_norm_stderr": 0.03398110890294636 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.8333333333333334, "acc_stderr": 0.026156867523931045, "acc_norm": 0.8333333333333334, "acc_norm_stderr": 0.026156867523931045 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.8059071729957806, "acc_stderr": 0.025744902532290916, "acc_norm": 0.8059071729957806, "acc_norm_stderr": 0.025744902532290916 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.6905829596412556, "acc_stderr": 0.031024411740572213, "acc_norm": 0.6905829596412556, "acc_norm_stderr": 0.031024411740572213 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.8244274809160306, "acc_stderr": 0.033368203384760736, "acc_norm": 0.8244274809160306, "acc_norm_stderr": 0.033368203384760736 }, "harness|hendrycksTest-international_law|5": { "acc": 0.8016528925619835, "acc_stderr": 0.03640118271990947, "acc_norm": 0.8016528925619835, "acc_norm_stderr": 0.03640118271990947 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.7777777777777778, "acc_stderr": 0.040191074725573483, "acc_norm": 0.7777777777777778, "acc_norm_stderr": 0.040191074725573483 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.7791411042944786, "acc_stderr": 0.03259177392742179, "acc_norm": 0.7791411042944786, "acc_norm_stderr": 0.03259177392742179 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.41964285714285715, "acc_stderr": 0.046840993210771065, "acc_norm": 0.41964285714285715, "acc_norm_stderr": 0.046840993210771065 }, "harness|hendrycksTest-management|5": { "acc": 0.7572815533980582, "acc_stderr": 0.04245022486384495, "acc_norm": 0.7572815533980582, "acc_norm_stderr": 0.04245022486384495 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8803418803418803, "acc_stderr": 0.021262719400406957, "acc_norm": 0.8803418803418803, "acc_norm_stderr": 0.021262719400406957 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.71, "acc_stderr": 0.045604802157206845, "acc_norm": 0.71, "acc_norm_stderr": 0.045604802157206845 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.8352490421455939, "acc_stderr": 0.013265346261323797, "acc_norm": 0.8352490421455939, "acc_norm_stderr": 0.013265346261323797 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.7398843930635838, "acc_stderr": 0.023618678310069356, "acc_norm": 0.7398843930635838, "acc_norm_stderr": 0.023618678310069356 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.4759776536312849, "acc_stderr": 0.016703190189300186, "acc_norm": 0.4759776536312849, "acc_norm_stderr": 0.016703190189300186 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.7222222222222222, "acc_stderr": 0.025646863097137897, "acc_norm": 0.7222222222222222, "acc_norm_stderr": 0.025646863097137897 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.7170418006430869, "acc_stderr": 0.02558306248998481, "acc_norm": 0.7170418006430869, "acc_norm_stderr": 0.02558306248998481 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.7469135802469136, "acc_stderr": 0.024191808600712995, "acc_norm": 0.7469135802469136, "acc_norm_stderr": 0.024191808600712995 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.48936170212765956, 
"acc_stderr": 0.029820747191422473, "acc_norm": 0.48936170212765956, "acc_norm_stderr": 0.029820747191422473 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.47392438070404175, "acc_stderr": 0.012752858346533131, "acc_norm": 0.47392438070404175, "acc_norm_stderr": 0.012752858346533131 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.6875, "acc_stderr": 0.02815637344037142, "acc_norm": 0.6875, "acc_norm_stderr": 0.02815637344037142 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.6715686274509803, "acc_stderr": 0.018999707383162673, "acc_norm": 0.6715686274509803, "acc_norm_stderr": 0.018999707383162673 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6909090909090909, "acc_stderr": 0.044262946482000985, "acc_norm": 0.6909090909090909, "acc_norm_stderr": 0.044262946482000985 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.7224489795918367, "acc_stderr": 0.028666857790274648, "acc_norm": 0.7224489795918367, "acc_norm_stderr": 0.028666857790274648 }, "harness|hendrycksTest-sociology|5": { "acc": 0.8407960199004975, "acc_stderr": 0.025870646766169136, "acc_norm": 0.8407960199004975, "acc_norm_stderr": 0.025870646766169136 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.86, "acc_stderr": 0.0348735088019777, "acc_norm": 0.86, "acc_norm_stderr": 0.0348735088019777 }, "harness|hendrycksTest-virology|5": { "acc": 0.5481927710843374, "acc_stderr": 0.03874371556587953, "acc_norm": 0.5481927710843374, "acc_norm_stderr": 0.03874371556587953 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8245614035087719, "acc_stderr": 0.029170885500727665, "acc_norm": 0.8245614035087719, "acc_norm_stderr": 0.029170885500727665 }, "harness|truthfulqa:mc|0": { "mc1": 0.5507955936352509, "mc1_stderr": 0.01741294198611529, "mc2": 0.6931209356367747, "mc2_stderr": 0.015031530031665238 }, "harness|winogrande|5": { "acc": 0.8200473559589582, "acc_stderr": 0.01079646868806868 }, "harness|gsm8k|5": { "acc": 0.7164518574677786, "acc_stderr": 0.012415070917508124 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). 
--> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
open-llm-leaderboard/details_janai-hq__trinity-v1
[ "region:us" ]
2023-12-16T19:06:21+00:00
{"pretty_name": "Evaluation run of janai-hq/trinity-v1", "dataset_summary": "Dataset automatically created during the evaluation run of model [janai-hq/trinity-v1](https://huggingface.co/janai-hq/trinity-v1) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_janai-hq__trinity-v1\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-12-16T19:03:31.389271](https://huggingface.co/datasets/open-llm-leaderboard/details_janai-hq__trinity-v1/blob/main/results_2023-12-16T19-03-31.389271.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6575877329335247,\n \"acc_stderr\": 0.031985421208388404,\n \"acc_norm\": 0.6571647268300141,\n \"acc_norm_stderr\": 0.032648337921958155,\n \"mc1\": 0.5507955936352509,\n \"mc1_stderr\": 0.01741294198611529,\n \"mc2\": 0.6931209356367747,\n \"mc2_stderr\": 0.015031530031665238\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.6988054607508533,\n \"acc_stderr\": 0.013406741767847632,\n \"acc_norm\": 0.7226962457337884,\n \"acc_norm_stderr\": 0.013082095839059376\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.711113324039036,\n \"acc_stderr\": 0.004523188431142894,\n \"acc_norm\": 0.8835889265086636,\n \"acc_norm_stderr\": 0.0032006176493464752\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.33,\n \"acc_stderr\": 0.04725815626252605,\n \"acc_norm\": 0.33,\n \"acc_norm_stderr\": 0.04725815626252605\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6592592592592592,\n \"acc_stderr\": 0.040943762699967926,\n \"acc_norm\": 0.6592592592592592,\n \"acc_norm_stderr\": 0.040943762699967926\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.6907894736842105,\n \"acc_stderr\": 0.037610708698674805,\n \"acc_norm\": 0.6907894736842105,\n \"acc_norm_stderr\": 0.037610708698674805\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.62,\n \"acc_stderr\": 0.048783173121456316,\n \"acc_norm\": 0.62,\n \"acc_norm_stderr\": 0.048783173121456316\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.720754716981132,\n \"acc_stderr\": 0.027611163402399715,\n \"acc_norm\": 0.720754716981132,\n \"acc_norm_stderr\": 0.027611163402399715\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7708333333333334,\n \"acc_stderr\": 0.03514697467862388,\n \"acc_norm\": 0.7708333333333334,\n \"acc_norm_stderr\": 0.03514697467862388\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.47,\n \"acc_stderr\": 0.050161355804659205,\n \"acc_norm\": 0.47,\n \"acc_norm_stderr\": 
0.050161355804659205\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.56,\n \"acc_stderr\": 0.049888765156985884,\n \"acc_norm\": 0.56,\n \"acc_norm_stderr\": 0.049888765156985884\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.31,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.31,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6994219653179191,\n \"acc_stderr\": 0.03496101481191179,\n \"acc_norm\": 0.6994219653179191,\n \"acc_norm_stderr\": 0.03496101481191179\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.46078431372549017,\n \"acc_stderr\": 0.04959859966384181,\n \"acc_norm\": 0.46078431372549017,\n \"acc_norm_stderr\": 0.04959859966384181\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.77,\n \"acc_stderr\": 0.042295258468165065,\n \"acc_norm\": 0.77,\n \"acc_norm_stderr\": 0.042295258468165065\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.5787234042553191,\n \"acc_stderr\": 0.03227834510146268,\n \"acc_norm\": 0.5787234042553191,\n \"acc_norm_stderr\": 0.03227834510146268\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.5087719298245614,\n \"acc_stderr\": 0.04702880432049615,\n \"acc_norm\": 0.5087719298245614,\n \"acc_norm_stderr\": 0.04702880432049615\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5655172413793104,\n \"acc_stderr\": 0.04130740879555498,\n \"acc_norm\": 0.5655172413793104,\n \"acc_norm_stderr\": 0.04130740879555498\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.42328042328042326,\n \"acc_stderr\": 0.02544636563440678,\n \"acc_norm\": 0.42328042328042326,\n \"acc_norm_stderr\": 0.02544636563440678\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.46825396825396826,\n \"acc_stderr\": 0.04463112720677172,\n \"acc_norm\": 0.46825396825396826,\n \"acc_norm_stderr\": 0.04463112720677172\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.33,\n \"acc_stderr\": 0.04725815626252604,\n \"acc_norm\": 0.33,\n \"acc_norm_stderr\": 0.04725815626252604\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7806451612903226,\n \"acc_stderr\": 0.023540799358723295,\n \"acc_norm\": 0.7806451612903226,\n \"acc_norm_stderr\": 0.023540799358723295\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.5320197044334976,\n \"acc_stderr\": 0.03510766597959215,\n \"acc_norm\": 0.5320197044334976,\n \"acc_norm_stderr\": 0.03510766597959215\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.7,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.7,\n \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7757575757575758,\n \"acc_stderr\": 0.03256866661681102,\n \"acc_norm\": 0.7757575757575758,\n \"acc_norm_stderr\": 0.03256866661681102\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.7878787878787878,\n \"acc_stderr\": 0.029126522834586815,\n \"acc_norm\": 0.7878787878787878,\n \"acc_norm_stderr\": 0.029126522834586815\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.9015544041450777,\n \"acc_stderr\": 0.02150024957603348,\n \"acc_norm\": 0.9015544041450777,\n \"acc_norm_stderr\": 0.02150024957603348\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.6666666666666666,\n \"acc_stderr\": 
0.023901157979402538,\n \"acc_norm\": 0.6666666666666666,\n \"acc_norm_stderr\": 0.023901157979402538\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.34074074074074073,\n \"acc_stderr\": 0.028897748741131154,\n \"acc_norm\": 0.34074074074074073,\n \"acc_norm_stderr\": 0.028897748741131154\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.6722689075630253,\n \"acc_stderr\": 0.03048991141767323,\n \"acc_norm\": 0.6722689075630253,\n \"acc_norm_stderr\": 0.03048991141767323\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.3509933774834437,\n \"acc_stderr\": 0.03896981964257375,\n \"acc_norm\": 0.3509933774834437,\n \"acc_norm_stderr\": 0.03896981964257375\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8532110091743119,\n \"acc_stderr\": 0.01517314184512625,\n \"acc_norm\": 0.8532110091743119,\n \"acc_norm_stderr\": 0.01517314184512625\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.5416666666666666,\n \"acc_stderr\": 0.03398110890294636,\n \"acc_norm\": 0.5416666666666666,\n \"acc_norm_stderr\": 0.03398110890294636\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.8333333333333334,\n \"acc_stderr\": 0.026156867523931045,\n \"acc_norm\": 0.8333333333333334,\n \"acc_norm_stderr\": 0.026156867523931045\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.8059071729957806,\n \"acc_stderr\": 0.025744902532290916,\n \"acc_norm\": 0.8059071729957806,\n \"acc_norm_stderr\": 0.025744902532290916\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6905829596412556,\n \"acc_stderr\": 0.031024411740572213,\n \"acc_norm\": 0.6905829596412556,\n \"acc_norm_stderr\": 0.031024411740572213\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.8244274809160306,\n \"acc_stderr\": 0.033368203384760736,\n \"acc_norm\": 0.8244274809160306,\n \"acc_norm_stderr\": 0.033368203384760736\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.8016528925619835,\n \"acc_stderr\": 0.03640118271990947,\n \"acc_norm\": 0.8016528925619835,\n \"acc_norm_stderr\": 0.03640118271990947\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7777777777777778,\n \"acc_stderr\": 0.040191074725573483,\n \"acc_norm\": 0.7777777777777778,\n \"acc_norm_stderr\": 0.040191074725573483\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7791411042944786,\n \"acc_stderr\": 0.03259177392742179,\n \"acc_norm\": 0.7791411042944786,\n \"acc_norm_stderr\": 0.03259177392742179\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.41964285714285715,\n \"acc_stderr\": 0.046840993210771065,\n \"acc_norm\": 0.41964285714285715,\n \"acc_norm_stderr\": 0.046840993210771065\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7572815533980582,\n \"acc_stderr\": 0.04245022486384495,\n \"acc_norm\": 0.7572815533980582,\n \"acc_norm_stderr\": 0.04245022486384495\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8803418803418803,\n \"acc_stderr\": 0.021262719400406957,\n \"acc_norm\": 0.8803418803418803,\n \"acc_norm_stderr\": 0.021262719400406957\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.71,\n \"acc_stderr\": 0.045604802157206845,\n \"acc_norm\": 0.71,\n \"acc_norm_stderr\": 0.045604802157206845\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.8352490421455939,\n \"acc_stderr\": 0.013265346261323797,\n \"acc_norm\": 0.8352490421455939,\n 
\"acc_norm_stderr\": 0.013265346261323797\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.7398843930635838,\n \"acc_stderr\": 0.023618678310069356,\n \"acc_norm\": 0.7398843930635838,\n \"acc_norm_stderr\": 0.023618678310069356\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.4759776536312849,\n \"acc_stderr\": 0.016703190189300186,\n \"acc_norm\": 0.4759776536312849,\n \"acc_norm_stderr\": 0.016703190189300186\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.7222222222222222,\n \"acc_stderr\": 0.025646863097137897,\n \"acc_norm\": 0.7222222222222222,\n \"acc_norm_stderr\": 0.025646863097137897\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.7170418006430869,\n \"acc_stderr\": 0.02558306248998481,\n \"acc_norm\": 0.7170418006430869,\n \"acc_norm_stderr\": 0.02558306248998481\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.7469135802469136,\n \"acc_stderr\": 0.024191808600712995,\n \"acc_norm\": 0.7469135802469136,\n \"acc_norm_stderr\": 0.024191808600712995\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.48936170212765956,\n \"acc_stderr\": 0.029820747191422473,\n \"acc_norm\": 0.48936170212765956,\n \"acc_norm_stderr\": 0.029820747191422473\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.47392438070404175,\n \"acc_stderr\": 0.012752858346533131,\n \"acc_norm\": 0.47392438070404175,\n \"acc_norm_stderr\": 0.012752858346533131\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.6875,\n \"acc_stderr\": 0.02815637344037142,\n \"acc_norm\": 0.6875,\n \"acc_norm_stderr\": 0.02815637344037142\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6715686274509803,\n \"acc_stderr\": 0.018999707383162673,\n \"acc_norm\": 0.6715686274509803,\n \"acc_norm_stderr\": 0.018999707383162673\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6909090909090909,\n \"acc_stderr\": 0.044262946482000985,\n \"acc_norm\": 0.6909090909090909,\n \"acc_norm_stderr\": 0.044262946482000985\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.7224489795918367,\n \"acc_stderr\": 0.028666857790274648,\n \"acc_norm\": 0.7224489795918367,\n \"acc_norm_stderr\": 0.028666857790274648\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8407960199004975,\n \"acc_stderr\": 0.025870646766169136,\n \"acc_norm\": 0.8407960199004975,\n \"acc_norm_stderr\": 0.025870646766169136\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.86,\n \"acc_stderr\": 0.0348735088019777,\n \"acc_norm\": 0.86,\n \"acc_norm_stderr\": 0.0348735088019777\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5481927710843374,\n \"acc_stderr\": 0.03874371556587953,\n \"acc_norm\": 0.5481927710843374,\n \"acc_norm_stderr\": 0.03874371556587953\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8245614035087719,\n \"acc_stderr\": 0.029170885500727665,\n \"acc_norm\": 0.8245614035087719,\n \"acc_norm_stderr\": 0.029170885500727665\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.5507955936352509,\n \"mc1_stderr\": 0.01741294198611529,\n \"mc2\": 0.6931209356367747,\n \"mc2_stderr\": 0.015031530031665238\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.8200473559589582,\n \"acc_stderr\": 0.01079646868806868\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.7164518574677786,\n \"acc_stderr\": 0.012415070917508124\n }\n}\n```", "repo_url": "https://huggingface.co/janai-hq/trinity-v1", "leaderboard_url": 
"https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_12_16T19_03_31.389271", "path": ["**/details_harness|arc:challenge|25_2023-12-16T19-03-31.389271.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-12-16T19-03-31.389271.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_12_16T19_03_31.389271", "path": ["**/details_harness|gsm8k|5_2023-12-16T19-03-31.389271.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-12-16T19-03-31.389271.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_12_16T19_03_31.389271", "path": ["**/details_harness|hellaswag|10_2023-12-16T19-03-31.389271.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-12-16T19-03-31.389271.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_12_16T19_03_31.389271", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T19-03-31.389271.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-12-16T19-03-31.389271.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-12-16T19-03-31.389271.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T19-03-31.389271.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T19-03-31.389271.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-12-16T19-03-31.389271.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T19-03-31.389271.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T19-03-31.389271.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T19-03-31.389271.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T19-03-31.389271.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-12-16T19-03-31.389271.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-12-16T19-03-31.389271.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T19-03-31.389271.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-12-16T19-03-31.389271.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T19-03-31.389271.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T19-03-31.389271.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T19-03-31.389271.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-12-16T19-03-31.389271.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T19-03-31.389271.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T19-03-31.389271.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T19-03-31.389271.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T19-03-31.389271.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T19-03-31.389271.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T19-03-31.389271.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T19-03-31.389271.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T19-03-31.389271.parquet", 
"**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T19-03-31.389271.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T19-03-31.389271.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T19-03-31.389271.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T19-03-31.389271.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T19-03-31.389271.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T19-03-31.389271.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-12-16T19-03-31.389271.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T19-03-31.389271.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-12-16T19-03-31.389271.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T19-03-31.389271.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T19-03-31.389271.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T19-03-31.389271.parquet", "**/details_harness|hendrycksTest-management|5_2023-12-16T19-03-31.389271.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-12-16T19-03-31.389271.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T19-03-31.389271.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T19-03-31.389271.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T19-03-31.389271.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T19-03-31.389271.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-12-16T19-03-31.389271.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-12-16T19-03-31.389271.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-12-16T19-03-31.389271.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T19-03-31.389271.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-12-16T19-03-31.389271.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T19-03-31.389271.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T19-03-31.389271.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-12-16T19-03-31.389271.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-12-16T19-03-31.389271.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-12-16T19-03-31.389271.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T19-03-31.389271.parquet", "**/details_harness|hendrycksTest-virology|5_2023-12-16T19-03-31.389271.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-12-16T19-03-31.389271.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T19-03-31.389271.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-12-16T19-03-31.389271.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-12-16T19-03-31.389271.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T19-03-31.389271.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T19-03-31.389271.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-12-16T19-03-31.389271.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T19-03-31.389271.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T19-03-31.389271.parquet", 
"**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T19-03-31.389271.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T19-03-31.389271.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-12-16T19-03-31.389271.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-12-16T19-03-31.389271.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T19-03-31.389271.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-12-16T19-03-31.389271.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T19-03-31.389271.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T19-03-31.389271.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T19-03-31.389271.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-12-16T19-03-31.389271.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T19-03-31.389271.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T19-03-31.389271.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T19-03-31.389271.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T19-03-31.389271.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T19-03-31.389271.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T19-03-31.389271.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T19-03-31.389271.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T19-03-31.389271.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T19-03-31.389271.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T19-03-31.389271.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T19-03-31.389271.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T19-03-31.389271.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T19-03-31.389271.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T19-03-31.389271.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-12-16T19-03-31.389271.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T19-03-31.389271.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-12-16T19-03-31.389271.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T19-03-31.389271.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T19-03-31.389271.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T19-03-31.389271.parquet", "**/details_harness|hendrycksTest-management|5_2023-12-16T19-03-31.389271.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-12-16T19-03-31.389271.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T19-03-31.389271.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T19-03-31.389271.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T19-03-31.389271.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T19-03-31.389271.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-12-16T19-03-31.389271.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-12-16T19-03-31.389271.parquet", 
"**/details_harness|hendrycksTest-prehistory|5_2023-12-16T19-03-31.389271.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T19-03-31.389271.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-12-16T19-03-31.389271.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T19-03-31.389271.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T19-03-31.389271.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-12-16T19-03-31.389271.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-12-16T19-03-31.389271.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-12-16T19-03-31.389271.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T19-03-31.389271.parquet", "**/details_harness|hendrycksTest-virology|5_2023-12-16T19-03-31.389271.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-12-16T19-03-31.389271.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_12_16T19_03_31.389271", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T19-03-31.389271.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T19-03-31.389271.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_12_16T19_03_31.389271", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-12-16T19-03-31.389271.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-12-16T19-03-31.389271.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_12_16T19_03_31.389271", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-12-16T19-03-31.389271.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-12-16T19-03-31.389271.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_12_16T19_03_31.389271", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T19-03-31.389271.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T19-03-31.389271.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_12_16T19_03_31.389271", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T19-03-31.389271.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T19-03-31.389271.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_12_16T19_03_31.389271", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-12-16T19-03-31.389271.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-12-16T19-03-31.389271.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_12_16T19_03_31.389271", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T19-03-31.389271.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T19-03-31.389271.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_12_16T19_03_31.389271", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T19-03-31.389271.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T19-03-31.389271.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_12_16T19_03_31.389271", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T19-03-31.389271.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T19-03-31.389271.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_12_16T19_03_31.389271", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T19-03-31.389271.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T19-03-31.389271.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_12_16T19_03_31.389271", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-12-16T19-03-31.389271.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-12-16T19-03-31.389271.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_12_16T19_03_31.389271", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-12-16T19-03-31.389271.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-12-16T19-03-31.389271.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_12_16T19_03_31.389271", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T19-03-31.389271.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T19-03-31.389271.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_12_16T19_03_31.389271", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-12-16T19-03-31.389271.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-12-16T19-03-31.389271.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_12_16T19_03_31.389271", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T19-03-31.389271.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T19-03-31.389271.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_12_16T19_03_31.389271", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T19-03-31.389271.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T19-03-31.389271.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_12_16T19_03_31.389271", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T19-03-31.389271.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T19-03-31.389271.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_12_16T19_03_31.389271", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-12-16T19-03-31.389271.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-12-16T19-03-31.389271.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_12_16T19_03_31.389271", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T19-03-31.389271.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T19-03-31.389271.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_12_16T19_03_31.389271", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T19-03-31.389271.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T19-03-31.389271.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_12_16T19_03_31.389271", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T19-03-31.389271.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T19-03-31.389271.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_12_16T19_03_31.389271", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T19-03-31.389271.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T19-03-31.389271.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_12_16T19_03_31.389271", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T19-03-31.389271.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T19-03-31.389271.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_12_16T19_03_31.389271", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T19-03-31.389271.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T19-03-31.389271.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_12_16T19_03_31.389271", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T19-03-31.389271.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T19-03-31.389271.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_12_16T19_03_31.389271", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T19-03-31.389271.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T19-03-31.389271.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_12_16T19_03_31.389271", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T19-03-31.389271.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T19-03-31.389271.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_12_16T19_03_31.389271", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T19-03-31.389271.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T19-03-31.389271.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_12_16T19_03_31.389271", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T19-03-31.389271.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T19-03-31.389271.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_12_16T19_03_31.389271", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T19-03-31.389271.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T19-03-31.389271.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_12_16T19_03_31.389271", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T19-03-31.389271.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T19-03-31.389271.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_12_16T19_03_31.389271", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T19-03-31.389271.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T19-03-31.389271.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_12_16T19_03_31.389271", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-12-16T19-03-31.389271.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-12-16T19-03-31.389271.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_12_16T19_03_31.389271", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T19-03-31.389271.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T19-03-31.389271.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_12_16T19_03_31.389271", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-12-16T19-03-31.389271.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-12-16T19-03-31.389271.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_12_16T19_03_31.389271", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T19-03-31.389271.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T19-03-31.389271.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_12_16T19_03_31.389271", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T19-03-31.389271.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T19-03-31.389271.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_12_16T19_03_31.389271", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T19-03-31.389271.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T19-03-31.389271.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_12_16T19_03_31.389271", "path": ["**/details_harness|hendrycksTest-management|5_2023-12-16T19-03-31.389271.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-12-16T19-03-31.389271.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_12_16T19_03_31.389271", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-12-16T19-03-31.389271.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-12-16T19-03-31.389271.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_12_16T19_03_31.389271", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T19-03-31.389271.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T19-03-31.389271.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_12_16T19_03_31.389271", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T19-03-31.389271.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T19-03-31.389271.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_12_16T19_03_31.389271", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T19-03-31.389271.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T19-03-31.389271.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_12_16T19_03_31.389271", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T19-03-31.389271.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T19-03-31.389271.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_12_16T19_03_31.389271", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-12-16T19-03-31.389271.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-12-16T19-03-31.389271.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_12_16T19_03_31.389271", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-12-16T19-03-31.389271.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-12-16T19-03-31.389271.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_12_16T19_03_31.389271", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-12-16T19-03-31.389271.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-12-16T19-03-31.389271.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_12_16T19_03_31.389271", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T19-03-31.389271.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T19-03-31.389271.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_12_16T19_03_31.389271", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-12-16T19-03-31.389271.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2023-12-16T19-03-31.389271.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_12_16T19_03_31.389271", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T19-03-31.389271.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T19-03-31.389271.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_12_16T19_03_31.389271", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T19-03-31.389271.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T19-03-31.389271.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_12_16T19_03_31.389271", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-12-16T19-03-31.389271.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-12-16T19-03-31.389271.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_12_16T19_03_31.389271", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-12-16T19-03-31.389271.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-12-16T19-03-31.389271.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_12_16T19_03_31.389271", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-12-16T19-03-31.389271.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-12-16T19-03-31.389271.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_12_16T19_03_31.389271", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T19-03-31.389271.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T19-03-31.389271.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_12_16T19_03_31.389271", "path": ["**/details_harness|hendrycksTest-virology|5_2023-12-16T19-03-31.389271.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-12-16T19-03-31.389271.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_12_16T19_03_31.389271", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-12-16T19-03-31.389271.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-12-16T19-03-31.389271.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_12_16T19_03_31.389271", "path": ["**/details_harness|truthfulqa:mc|0_2023-12-16T19-03-31.389271.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-12-16T19-03-31.389271.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_12_16T19_03_31.389271", "path": ["**/details_harness|winogrande|5_2023-12-16T19-03-31.389271.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-12-16T19-03-31.389271.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_12_16T19_03_31.389271", "path": ["results_2023-12-16T19-03-31.389271.parquet"]}, {"split": "latest", "path": 
["results_2023-12-16T19-03-31.389271.parquet"]}]}]}
2023-12-16T19:07:02+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of janai-hq/trinity-v1 Dataset automatically created during the evaluation run of model janai-hq/trinity-v1 on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-12-16T19:03:31.389271 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
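The flattened card text above says "To load the details from a run, you can for instance do the following:" but the accompanying snippet was stripped in this copy. Below is a minimal sketch of that call; the repository id `open-llm-leaderboard/details_janai-hq__trinity-v1` is an assumption inferred from the naming convention used by the other evaluation-run datasets in this dump, and `harness_winogrande_5` is one of the configurations listed in the metadata above.

```python
# Minimal sketch of the loading call referenced in the card text above.
# Assumption: the repository follows the leaderboard naming convention
# open-llm-leaderboard/details_<org>__<model>; adjust if the real id differs.
from datasets import load_dataset

data = load_dataset(
    "open-llm-leaderboard/details_janai-hq__trinity-v1",
    "harness_winogrande_5",  # any of the 63 configurations can be used here
    split="train",           # "train" always points to the latest results
)
print(data)
```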
[ "# Dataset Card for Evaluation run of janai-hq/trinity-v1\n\n\n\nDataset automatically created during the evaluation run of model janai-hq/trinity-v1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-12-16T19:03:31.389271(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of janai-hq/trinity-v1\n\n\n\nDataset automatically created during the evaluation run of model janai-hq/trinity-v1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-12-16T19:03:31.389271(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 183, 67, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of janai-hq/trinity-v1\n\n\n\nDataset automatically created during the evaluation run of model janai-hq/trinity-v1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-12-16T19:03:31.389271(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]## Dataset Card Authors [optional]## Dataset Card Contact" ]
0a809bd43a631295cf9db918c77e648aafdfc044
# Dataset Card for Evaluation run of maywell/PiVoT-10.7B-Mistral-v0.2 <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [maywell/PiVoT-10.7B-Mistral-v0.2](https://huggingface.co/maywell/PiVoT-10.7B-Mistral-v0.2) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_maywell__PiVoT-10.7B-Mistral-v0.2", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-12-16T19:05:37.712893](https://huggingface.co/datasets/open-llm-leaderboard/details_maywell__PiVoT-10.7B-Mistral-v0.2/blob/main/results_2023-12-16T19-05-37.712893.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.5992040625455914, "acc_stderr": 0.03324031031237355, "acc_norm": 0.6028778357395081, "acc_norm_stderr": 0.033924366555740444, "mc1": 0.4186046511627907, "mc1_stderr": 0.01727001528447686, "mc2": 0.5823109285763256, "mc2_stderr": 0.01521353248750615 }, "harness|arc:challenge|25": { "acc": 0.591296928327645, "acc_stderr": 0.014365750345427, "acc_norm": 0.6331058020477816, "acc_norm_stderr": 0.0140841331181043 }, "harness|hellaswag|10": { "acc": 0.6161123282214698, "acc_stderr": 0.0048533716462392466, "acc_norm": 0.8167695678151763, "acc_norm_stderr": 0.0038606469988972836 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.26, "acc_stderr": 0.0440844002276808, "acc_norm": 0.26, "acc_norm_stderr": 0.0440844002276808 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.5925925925925926, "acc_stderr": 0.04244633238353228, "acc_norm": 0.5925925925925926, "acc_norm_stderr": 0.04244633238353228 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.6513157894736842, "acc_stderr": 0.03878139888797611, "acc_norm": 0.6513157894736842, "acc_norm_stderr": 0.03878139888797611 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.57, "acc_stderr": 0.049756985195624284, "acc_norm": 0.57, "acc_norm_stderr": 0.049756985195624284 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.6641509433962264, "acc_stderr": 0.02906722014664483, "acc_norm": 0.6641509433962264, "acc_norm_stderr": 0.02906722014664483 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.6875, "acc_stderr": 0.038760854559127644, "acc_norm": 0.6875, "acc_norm_stderr": 0.038760854559127644 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.45, "acc_stderr": 0.05, "acc_norm": 0.45, "acc_norm_stderr": 0.05 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.45, "acc_stderr": 0.05, "acc_norm": 0.45, "acc_norm_stderr": 0.05 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.32, "acc_stderr": 0.046882617226215034, 
"acc_norm": 0.32, "acc_norm_stderr": 0.046882617226215034 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.5895953757225434, "acc_stderr": 0.03750757044895537, "acc_norm": 0.5895953757225434, "acc_norm_stderr": 0.03750757044895537 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.43137254901960786, "acc_stderr": 0.04928099597287533, "acc_norm": 0.43137254901960786, "acc_norm_stderr": 0.04928099597287533 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.75, "acc_stderr": 0.04351941398892446, "acc_norm": 0.75, "acc_norm_stderr": 0.04351941398892446 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.48936170212765956, "acc_stderr": 0.03267862331014063, "acc_norm": 0.48936170212765956, "acc_norm_stderr": 0.03267862331014063 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.39473684210526316, "acc_stderr": 0.045981880578165414, "acc_norm": 0.39473684210526316, "acc_norm_stderr": 0.045981880578165414 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5172413793103449, "acc_stderr": 0.04164188720169375, "acc_norm": 0.5172413793103449, "acc_norm_stderr": 0.04164188720169375 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.4312169312169312, "acc_stderr": 0.025506481698138204, "acc_norm": 0.4312169312169312, "acc_norm_stderr": 0.025506481698138204 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.4523809523809524, "acc_stderr": 0.044518079590553275, "acc_norm": 0.4523809523809524, "acc_norm_stderr": 0.044518079590553275 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.41, "acc_stderr": 0.04943110704237102, "acc_norm": 0.41, "acc_norm_stderr": 0.04943110704237102 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.6516129032258065, "acc_stderr": 0.02710482632810094, "acc_norm": 0.6516129032258065, "acc_norm_stderr": 0.02710482632810094 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.458128078817734, "acc_stderr": 0.03505630140785741, "acc_norm": 0.458128078817734, "acc_norm_stderr": 0.03505630140785741 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.66, "acc_stderr": 0.04760952285695237, "acc_norm": 0.66, "acc_norm_stderr": 0.04760952285695237 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.7636363636363637, "acc_stderr": 0.03317505930009181, "acc_norm": 0.7636363636363637, "acc_norm_stderr": 0.03317505930009181 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.7676767676767676, "acc_stderr": 0.030088629490217487, "acc_norm": 0.7676767676767676, "acc_norm_stderr": 0.030088629490217487 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.8652849740932642, "acc_stderr": 0.024639789097709443, "acc_norm": 0.8652849740932642, "acc_norm_stderr": 0.024639789097709443 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.5846153846153846, "acc_stderr": 0.02498535492310234, "acc_norm": 0.5846153846153846, "acc_norm_stderr": 0.02498535492310234 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.3111111111111111, "acc_stderr": 0.028226446749683512, "acc_norm": 0.3111111111111111, "acc_norm_stderr": 0.028226446749683512 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.6974789915966386, "acc_stderr": 0.029837962388291936, "acc_norm": 0.6974789915966386, "acc_norm_stderr": 0.029837962388291936 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.2980132450331126, "acc_stderr": 0.037345356767871984, "acc_norm": 0.2980132450331126, "acc_norm_stderr": 0.037345356767871984 }, 
"harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8055045871559633, "acc_stderr": 0.01697028909045804, "acc_norm": 0.8055045871559633, "acc_norm_stderr": 0.01697028909045804 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.5138888888888888, "acc_stderr": 0.03408655867977749, "acc_norm": 0.5138888888888888, "acc_norm_stderr": 0.03408655867977749 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.7892156862745098, "acc_stderr": 0.0286265479124374, "acc_norm": 0.7892156862745098, "acc_norm_stderr": 0.0286265479124374 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.7763713080168776, "acc_stderr": 0.027123298205229966, "acc_norm": 0.7763713080168776, "acc_norm_stderr": 0.027123298205229966 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.6860986547085202, "acc_stderr": 0.03114679648297246, "acc_norm": 0.6860986547085202, "acc_norm_stderr": 0.03114679648297246 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.6412213740458015, "acc_stderr": 0.04206739313864908, "acc_norm": 0.6412213740458015, "acc_norm_stderr": 0.04206739313864908 }, "harness|hendrycksTest-international_law|5": { "acc": 0.7603305785123967, "acc_stderr": 0.038968789850704164, "acc_norm": 0.7603305785123967, "acc_norm_stderr": 0.038968789850704164 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.7314814814814815, "acc_stderr": 0.042844679680521934, "acc_norm": 0.7314814814814815, "acc_norm_stderr": 0.042844679680521934 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.6932515337423313, "acc_stderr": 0.03623089915724146, "acc_norm": 0.6932515337423313, "acc_norm_stderr": 0.03623089915724146 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.45535714285714285, "acc_stderr": 0.047268355537191, "acc_norm": 0.45535714285714285, "acc_norm_stderr": 0.047268355537191 }, "harness|hendrycksTest-management|5": { "acc": 0.7475728155339806, "acc_stderr": 0.04301250399690878, "acc_norm": 0.7475728155339806, "acc_norm_stderr": 0.04301250399690878 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8760683760683761, "acc_stderr": 0.02158649400128137, "acc_norm": 0.8760683760683761, "acc_norm_stderr": 0.02158649400128137 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.68, "acc_stderr": 0.046882617226215034, "acc_norm": 0.68, "acc_norm_stderr": 0.046882617226215034 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.7828863346104725, "acc_stderr": 0.014743125394823302, "acc_norm": 0.7828863346104725, "acc_norm_stderr": 0.014743125394823302 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.6242774566473989, "acc_stderr": 0.02607431485165708, "acc_norm": 0.6242774566473989, "acc_norm_stderr": 0.02607431485165708 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.27150837988826815, "acc_stderr": 0.014874252168095275, "acc_norm": 0.27150837988826815, "acc_norm_stderr": 0.014874252168095275 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.6699346405228758, "acc_stderr": 0.026925654653615697, "acc_norm": 0.6699346405228758, "acc_norm_stderr": 0.026925654653615697 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.6720257234726688, "acc_stderr": 0.02666441088693762, "acc_norm": 0.6720257234726688, "acc_norm_stderr": 0.02666441088693762 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.6604938271604939, "acc_stderr": 0.026348564412011624, "acc_norm": 0.6604938271604939, "acc_norm_stderr": 0.026348564412011624 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.44680851063829785, "acc_stderr": 0.029658235097666907, "acc_norm": 
0.44680851063829785, "acc_norm_stderr": 0.029658235097666907 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.44132985658409385, "acc_stderr": 0.01268201633564667, "acc_norm": 0.44132985658409385, "acc_norm_stderr": 0.01268201633564667 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.5845588235294118, "acc_stderr": 0.029935342707877746, "acc_norm": 0.5845588235294118, "acc_norm_stderr": 0.029935342707877746 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.6274509803921569, "acc_stderr": 0.019559646809215923, "acc_norm": 0.6274509803921569, "acc_norm_stderr": 0.019559646809215923 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6454545454545455, "acc_stderr": 0.045820048415054174, "acc_norm": 0.6454545454545455, "acc_norm_stderr": 0.045820048415054174 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.5673469387755102, "acc_stderr": 0.031717528240626645, "acc_norm": 0.5673469387755102, "acc_norm_stderr": 0.031717528240626645 }, "harness|hendrycksTest-sociology|5": { "acc": 0.7860696517412935, "acc_stderr": 0.028996909693328913, "acc_norm": 0.7860696517412935, "acc_norm_stderr": 0.028996909693328913 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.76, "acc_stderr": 0.042923469599092816, "acc_norm": 0.76, "acc_norm_stderr": 0.042923469599092816 }, "harness|hendrycksTest-virology|5": { "acc": 0.5, "acc_stderr": 0.03892494720807614, "acc_norm": 0.5, "acc_norm_stderr": 0.03892494720807614 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.7543859649122807, "acc_stderr": 0.0330140594698725, "acc_norm": 0.7543859649122807, "acc_norm_stderr": 0.0330140594698725 }, "harness|truthfulqa:mc|0": { "mc1": 0.4186046511627907, "mc1_stderr": 0.01727001528447686, "mc2": 0.5823109285763256, "mc2_stderr": 0.01521353248750615 }, "harness|winogrande|5": { "acc": 0.8003157063930545, "acc_stderr": 0.011235328382625849 }, "harness|gsm8k|5": { "acc": 0.42380591357088704, "acc_stderr": 0.01361163200881036 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). 
--> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
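As a complement to the loading snippet in the card above, the sketch below reads the aggregated metrics back from the "results" configuration using the "latest" split that the summary describes; treat the config and split names as taken from the card text rather than as a verified listing, since the metadata for this record is truncated here.

```python
# Sketch based on the card text above: the "results" configuration stores the
# aggregated metrics of the run, and the "latest" split points at the most
# recent evaluation (2023-12-16T19:05:37.712893 for this record).
from datasets import load_dataset

results = load_dataset(
    "open-llm-leaderboard/details_maywell__PiVoT-10.7B-Mistral-v0.2",
    "results",
    split="latest",
)
print(results[0])  # one row holding the aggregated scores, e.g. the "all" block
```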
open-llm-leaderboard/details_maywell__PiVoT-10.7B-Mistral-v0.2
[ "region:us" ]
2023-12-16T19:08:32+00:00
{"pretty_name": "Evaluation run of maywell/PiVoT-10.7B-Mistral-v0.2", "dataset_summary": "Dataset automatically created during the evaluation run of model [maywell/PiVoT-10.7B-Mistral-v0.2](https://huggingface.co/maywell/PiVoT-10.7B-Mistral-v0.2) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_maywell__PiVoT-10.7B-Mistral-v0.2\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-12-16T19:05:37.712893](https://huggingface.co/datasets/open-llm-leaderboard/details_maywell__PiVoT-10.7B-Mistral-v0.2/blob/main/results_2023-12-16T19-05-37.712893.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.5992040625455914,\n \"acc_stderr\": 0.03324031031237355,\n \"acc_norm\": 0.6028778357395081,\n \"acc_norm_stderr\": 0.033924366555740444,\n \"mc1\": 0.4186046511627907,\n \"mc1_stderr\": 0.01727001528447686,\n \"mc2\": 0.5823109285763256,\n \"mc2_stderr\": 0.01521353248750615\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.591296928327645,\n \"acc_stderr\": 0.014365750345427,\n \"acc_norm\": 0.6331058020477816,\n \"acc_norm_stderr\": 0.0140841331181043\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6161123282214698,\n \"acc_stderr\": 0.0048533716462392466,\n \"acc_norm\": 0.8167695678151763,\n \"acc_norm_stderr\": 0.0038606469988972836\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.26,\n \"acc_stderr\": 0.0440844002276808,\n \"acc_norm\": 0.26,\n \"acc_norm_stderr\": 0.0440844002276808\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.5925925925925926,\n \"acc_stderr\": 0.04244633238353228,\n \"acc_norm\": 0.5925925925925926,\n \"acc_norm_stderr\": 0.04244633238353228\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.6513157894736842,\n \"acc_stderr\": 0.03878139888797611,\n \"acc_norm\": 0.6513157894736842,\n \"acc_norm_stderr\": 0.03878139888797611\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.57,\n \"acc_stderr\": 0.049756985195624284,\n \"acc_norm\": 0.57,\n \"acc_norm_stderr\": 0.049756985195624284\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.6641509433962264,\n \"acc_stderr\": 0.02906722014664483,\n \"acc_norm\": 0.6641509433962264,\n \"acc_norm_stderr\": 0.02906722014664483\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.6875,\n \"acc_stderr\": 0.038760854559127644,\n \"acc_norm\": 0.6875,\n \"acc_norm_stderr\": 0.038760854559127644\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.45,\n \"acc_stderr\": 0.05,\n \"acc_norm\": 0.45,\n 
\"acc_norm_stderr\": 0.05\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.45,\n \"acc_stderr\": 0.05,\n \"acc_norm\": 0.45,\n \"acc_norm_stderr\": 0.05\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.32,\n \"acc_stderr\": 0.046882617226215034,\n \"acc_norm\": 0.32,\n \"acc_norm_stderr\": 0.046882617226215034\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.5895953757225434,\n \"acc_stderr\": 0.03750757044895537,\n \"acc_norm\": 0.5895953757225434,\n \"acc_norm_stderr\": 0.03750757044895537\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.43137254901960786,\n \"acc_stderr\": 0.04928099597287533,\n \"acc_norm\": 0.43137254901960786,\n \"acc_norm_stderr\": 0.04928099597287533\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.75,\n \"acc_stderr\": 0.04351941398892446,\n \"acc_norm\": 0.75,\n \"acc_norm_stderr\": 0.04351941398892446\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.48936170212765956,\n \"acc_stderr\": 0.03267862331014063,\n \"acc_norm\": 0.48936170212765956,\n \"acc_norm_stderr\": 0.03267862331014063\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.39473684210526316,\n \"acc_stderr\": 0.045981880578165414,\n \"acc_norm\": 0.39473684210526316,\n \"acc_norm_stderr\": 0.045981880578165414\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5172413793103449,\n \"acc_stderr\": 0.04164188720169375,\n \"acc_norm\": 0.5172413793103449,\n \"acc_norm_stderr\": 0.04164188720169375\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.4312169312169312,\n \"acc_stderr\": 0.025506481698138204,\n \"acc_norm\": 0.4312169312169312,\n \"acc_norm_stderr\": 0.025506481698138204\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.4523809523809524,\n \"acc_stderr\": 0.044518079590553275,\n \"acc_norm\": 0.4523809523809524,\n \"acc_norm_stderr\": 0.044518079590553275\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.41,\n \"acc_stderr\": 0.04943110704237102,\n \"acc_norm\": 0.41,\n \"acc_norm_stderr\": 0.04943110704237102\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.6516129032258065,\n \"acc_stderr\": 0.02710482632810094,\n \"acc_norm\": 0.6516129032258065,\n \"acc_norm_stderr\": 0.02710482632810094\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.458128078817734,\n \"acc_stderr\": 0.03505630140785741,\n \"acc_norm\": 0.458128078817734,\n \"acc_norm_stderr\": 0.03505630140785741\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.66,\n \"acc_stderr\": 0.04760952285695237,\n \"acc_norm\": 0.66,\n \"acc_norm_stderr\": 0.04760952285695237\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7636363636363637,\n \"acc_stderr\": 0.03317505930009181,\n \"acc_norm\": 0.7636363636363637,\n \"acc_norm_stderr\": 0.03317505930009181\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.7676767676767676,\n \"acc_stderr\": 0.030088629490217487,\n \"acc_norm\": 0.7676767676767676,\n \"acc_norm_stderr\": 0.030088629490217487\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.8652849740932642,\n \"acc_stderr\": 0.024639789097709443,\n \"acc_norm\": 0.8652849740932642,\n \"acc_norm_stderr\": 0.024639789097709443\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.5846153846153846,\n \"acc_stderr\": 0.02498535492310234,\n 
\"acc_norm\": 0.5846153846153846,\n \"acc_norm_stderr\": 0.02498535492310234\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.3111111111111111,\n \"acc_stderr\": 0.028226446749683512,\n \"acc_norm\": 0.3111111111111111,\n \"acc_norm_stderr\": 0.028226446749683512\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.6974789915966386,\n \"acc_stderr\": 0.029837962388291936,\n \"acc_norm\": 0.6974789915966386,\n \"acc_norm_stderr\": 0.029837962388291936\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.2980132450331126,\n \"acc_stderr\": 0.037345356767871984,\n \"acc_norm\": 0.2980132450331126,\n \"acc_norm_stderr\": 0.037345356767871984\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8055045871559633,\n \"acc_stderr\": 0.01697028909045804,\n \"acc_norm\": 0.8055045871559633,\n \"acc_norm_stderr\": 0.01697028909045804\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.5138888888888888,\n \"acc_stderr\": 0.03408655867977749,\n \"acc_norm\": 0.5138888888888888,\n \"acc_norm_stderr\": 0.03408655867977749\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.7892156862745098,\n \"acc_stderr\": 0.0286265479124374,\n \"acc_norm\": 0.7892156862745098,\n \"acc_norm_stderr\": 0.0286265479124374\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.7763713080168776,\n \"acc_stderr\": 0.027123298205229966,\n \"acc_norm\": 0.7763713080168776,\n \"acc_norm_stderr\": 0.027123298205229966\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6860986547085202,\n \"acc_stderr\": 0.03114679648297246,\n \"acc_norm\": 0.6860986547085202,\n \"acc_norm_stderr\": 0.03114679648297246\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.6412213740458015,\n \"acc_stderr\": 0.04206739313864908,\n \"acc_norm\": 0.6412213740458015,\n \"acc_norm_stderr\": 0.04206739313864908\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.7603305785123967,\n \"acc_stderr\": 0.038968789850704164,\n \"acc_norm\": 0.7603305785123967,\n \"acc_norm_stderr\": 0.038968789850704164\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7314814814814815,\n \"acc_stderr\": 0.042844679680521934,\n \"acc_norm\": 0.7314814814814815,\n \"acc_norm_stderr\": 0.042844679680521934\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.6932515337423313,\n \"acc_stderr\": 0.03623089915724146,\n \"acc_norm\": 0.6932515337423313,\n \"acc_norm_stderr\": 0.03623089915724146\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.45535714285714285,\n \"acc_stderr\": 0.047268355537191,\n \"acc_norm\": 0.45535714285714285,\n \"acc_norm_stderr\": 0.047268355537191\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7475728155339806,\n \"acc_stderr\": 0.04301250399690878,\n \"acc_norm\": 0.7475728155339806,\n \"acc_norm_stderr\": 0.04301250399690878\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8760683760683761,\n \"acc_stderr\": 0.02158649400128137,\n \"acc_norm\": 0.8760683760683761,\n \"acc_norm_stderr\": 0.02158649400128137\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.68,\n \"acc_stderr\": 0.046882617226215034,\n \"acc_norm\": 0.68,\n \"acc_norm_stderr\": 0.046882617226215034\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.7828863346104725,\n \"acc_stderr\": 0.014743125394823302,\n \"acc_norm\": 0.7828863346104725,\n \"acc_norm_stderr\": 0.014743125394823302\n 
},\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.6242774566473989,\n \"acc_stderr\": 0.02607431485165708,\n \"acc_norm\": 0.6242774566473989,\n \"acc_norm_stderr\": 0.02607431485165708\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.27150837988826815,\n \"acc_stderr\": 0.014874252168095275,\n \"acc_norm\": 0.27150837988826815,\n \"acc_norm_stderr\": 0.014874252168095275\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.6699346405228758,\n \"acc_stderr\": 0.026925654653615697,\n \"acc_norm\": 0.6699346405228758,\n \"acc_norm_stderr\": 0.026925654653615697\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.6720257234726688,\n \"acc_stderr\": 0.02666441088693762,\n \"acc_norm\": 0.6720257234726688,\n \"acc_norm_stderr\": 0.02666441088693762\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.6604938271604939,\n \"acc_stderr\": 0.026348564412011624,\n \"acc_norm\": 0.6604938271604939,\n \"acc_norm_stderr\": 0.026348564412011624\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.44680851063829785,\n \"acc_stderr\": 0.029658235097666907,\n \"acc_norm\": 0.44680851063829785,\n \"acc_norm_stderr\": 0.029658235097666907\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.44132985658409385,\n \"acc_stderr\": 0.01268201633564667,\n \"acc_norm\": 0.44132985658409385,\n \"acc_norm_stderr\": 0.01268201633564667\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.5845588235294118,\n \"acc_stderr\": 0.029935342707877746,\n \"acc_norm\": 0.5845588235294118,\n \"acc_norm_stderr\": 0.029935342707877746\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6274509803921569,\n \"acc_stderr\": 0.019559646809215923,\n \"acc_norm\": 0.6274509803921569,\n \"acc_norm_stderr\": 0.019559646809215923\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6454545454545455,\n \"acc_stderr\": 0.045820048415054174,\n \"acc_norm\": 0.6454545454545455,\n \"acc_norm_stderr\": 0.045820048415054174\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.5673469387755102,\n \"acc_stderr\": 0.031717528240626645,\n \"acc_norm\": 0.5673469387755102,\n \"acc_norm_stderr\": 0.031717528240626645\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.7860696517412935,\n \"acc_stderr\": 0.028996909693328913,\n \"acc_norm\": 0.7860696517412935,\n \"acc_norm_stderr\": 0.028996909693328913\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.76,\n \"acc_stderr\": 0.042923469599092816,\n \"acc_norm\": 0.76,\n \"acc_norm_stderr\": 0.042923469599092816\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5,\n \"acc_stderr\": 0.03892494720807614,\n \"acc_norm\": 0.5,\n \"acc_norm_stderr\": 0.03892494720807614\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.7543859649122807,\n \"acc_stderr\": 0.0330140594698725,\n \"acc_norm\": 0.7543859649122807,\n \"acc_norm_stderr\": 0.0330140594698725\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.4186046511627907,\n \"mc1_stderr\": 0.01727001528447686,\n \"mc2\": 0.5823109285763256,\n \"mc2_stderr\": 0.01521353248750615\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.8003157063930545,\n \"acc_stderr\": 0.011235328382625849\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.42380591357088704,\n \"acc_stderr\": 0.01361163200881036\n }\n}\n```", "repo_url": "https://huggingface.co/maywell/PiVoT-10.7B-Mistral-v0.2", "leaderboard_url": 
"https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_12_16T19_05_37.712893", "path": ["**/details_harness|arc:challenge|25_2023-12-16T19-05-37.712893.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-12-16T19-05-37.712893.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_12_16T19_05_37.712893", "path": ["**/details_harness|gsm8k|5_2023-12-16T19-05-37.712893.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-12-16T19-05-37.712893.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_12_16T19_05_37.712893", "path": ["**/details_harness|hellaswag|10_2023-12-16T19-05-37.712893.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-12-16T19-05-37.712893.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_12_16T19_05_37.712893", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T19-05-37.712893.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-12-16T19-05-37.712893.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-12-16T19-05-37.712893.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T19-05-37.712893.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T19-05-37.712893.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-12-16T19-05-37.712893.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T19-05-37.712893.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T19-05-37.712893.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T19-05-37.712893.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T19-05-37.712893.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-12-16T19-05-37.712893.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-12-16T19-05-37.712893.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T19-05-37.712893.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-12-16T19-05-37.712893.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T19-05-37.712893.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T19-05-37.712893.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T19-05-37.712893.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-12-16T19-05-37.712893.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T19-05-37.712893.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T19-05-37.712893.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T19-05-37.712893.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T19-05-37.712893.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T19-05-37.712893.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T19-05-37.712893.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T19-05-37.712893.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T19-05-37.712893.parquet", 
"**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T19-05-37.712893.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T19-05-37.712893.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T19-05-37.712893.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T19-05-37.712893.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T19-05-37.712893.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T19-05-37.712893.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-12-16T19-05-37.712893.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T19-05-37.712893.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-12-16T19-05-37.712893.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T19-05-37.712893.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T19-05-37.712893.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T19-05-37.712893.parquet", "**/details_harness|hendrycksTest-management|5_2023-12-16T19-05-37.712893.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-12-16T19-05-37.712893.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T19-05-37.712893.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T19-05-37.712893.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T19-05-37.712893.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T19-05-37.712893.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-12-16T19-05-37.712893.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-12-16T19-05-37.712893.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-12-16T19-05-37.712893.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T19-05-37.712893.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-12-16T19-05-37.712893.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T19-05-37.712893.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T19-05-37.712893.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-12-16T19-05-37.712893.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-12-16T19-05-37.712893.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-12-16T19-05-37.712893.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T19-05-37.712893.parquet", "**/details_harness|hendrycksTest-virology|5_2023-12-16T19-05-37.712893.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-12-16T19-05-37.712893.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T19-05-37.712893.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-12-16T19-05-37.712893.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-12-16T19-05-37.712893.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T19-05-37.712893.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T19-05-37.712893.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-12-16T19-05-37.712893.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T19-05-37.712893.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T19-05-37.712893.parquet", 
"**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T19-05-37.712893.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T19-05-37.712893.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-12-16T19-05-37.712893.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-12-16T19-05-37.712893.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T19-05-37.712893.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-12-16T19-05-37.712893.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T19-05-37.712893.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T19-05-37.712893.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T19-05-37.712893.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-12-16T19-05-37.712893.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T19-05-37.712893.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T19-05-37.712893.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T19-05-37.712893.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T19-05-37.712893.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T19-05-37.712893.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T19-05-37.712893.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T19-05-37.712893.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T19-05-37.712893.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T19-05-37.712893.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T19-05-37.712893.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T19-05-37.712893.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T19-05-37.712893.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T19-05-37.712893.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T19-05-37.712893.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-12-16T19-05-37.712893.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T19-05-37.712893.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-12-16T19-05-37.712893.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T19-05-37.712893.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T19-05-37.712893.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T19-05-37.712893.parquet", "**/details_harness|hendrycksTest-management|5_2023-12-16T19-05-37.712893.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-12-16T19-05-37.712893.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T19-05-37.712893.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T19-05-37.712893.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T19-05-37.712893.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T19-05-37.712893.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-12-16T19-05-37.712893.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-12-16T19-05-37.712893.parquet", 
"**/details_harness|hendrycksTest-prehistory|5_2023-12-16T19-05-37.712893.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T19-05-37.712893.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-12-16T19-05-37.712893.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T19-05-37.712893.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T19-05-37.712893.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-12-16T19-05-37.712893.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-12-16T19-05-37.712893.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-12-16T19-05-37.712893.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T19-05-37.712893.parquet", "**/details_harness|hendrycksTest-virology|5_2023-12-16T19-05-37.712893.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-12-16T19-05-37.712893.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_12_16T19_05_37.712893", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T19-05-37.712893.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T19-05-37.712893.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_12_16T19_05_37.712893", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-12-16T19-05-37.712893.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-12-16T19-05-37.712893.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_12_16T19_05_37.712893", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-12-16T19-05-37.712893.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-12-16T19-05-37.712893.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_12_16T19_05_37.712893", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T19-05-37.712893.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T19-05-37.712893.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_12_16T19_05_37.712893", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T19-05-37.712893.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T19-05-37.712893.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_12_16T19_05_37.712893", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-12-16T19-05-37.712893.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-12-16T19-05-37.712893.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_12_16T19_05_37.712893", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T19-05-37.712893.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T19-05-37.712893.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_12_16T19_05_37.712893", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T19-05-37.712893.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T19-05-37.712893.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_12_16T19_05_37.712893", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T19-05-37.712893.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T19-05-37.712893.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_12_16T19_05_37.712893", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T19-05-37.712893.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T19-05-37.712893.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_12_16T19_05_37.712893", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-12-16T19-05-37.712893.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-12-16T19-05-37.712893.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_12_16T19_05_37.712893", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-12-16T19-05-37.712893.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-12-16T19-05-37.712893.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_12_16T19_05_37.712893", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T19-05-37.712893.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T19-05-37.712893.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_12_16T19_05_37.712893", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-12-16T19-05-37.712893.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-12-16T19-05-37.712893.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_12_16T19_05_37.712893", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T19-05-37.712893.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T19-05-37.712893.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_12_16T19_05_37.712893", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T19-05-37.712893.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T19-05-37.712893.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_12_16T19_05_37.712893", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T19-05-37.712893.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T19-05-37.712893.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_12_16T19_05_37.712893", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-12-16T19-05-37.712893.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-12-16T19-05-37.712893.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_12_16T19_05_37.712893", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T19-05-37.712893.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T19-05-37.712893.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_12_16T19_05_37.712893", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T19-05-37.712893.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T19-05-37.712893.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_12_16T19_05_37.712893", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T19-05-37.712893.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T19-05-37.712893.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_12_16T19_05_37.712893", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T19-05-37.712893.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T19-05-37.712893.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_12_16T19_05_37.712893", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T19-05-37.712893.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T19-05-37.712893.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_12_16T19_05_37.712893", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T19-05-37.712893.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T19-05-37.712893.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_12_16T19_05_37.712893", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T19-05-37.712893.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T19-05-37.712893.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_12_16T19_05_37.712893", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T19-05-37.712893.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T19-05-37.712893.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_12_16T19_05_37.712893", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T19-05-37.712893.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T19-05-37.712893.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_12_16T19_05_37.712893", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T19-05-37.712893.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T19-05-37.712893.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_12_16T19_05_37.712893", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T19-05-37.712893.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T19-05-37.712893.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_12_16T19_05_37.712893", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T19-05-37.712893.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T19-05-37.712893.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_12_16T19_05_37.712893", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T19-05-37.712893.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T19-05-37.712893.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_12_16T19_05_37.712893", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T19-05-37.712893.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T19-05-37.712893.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_12_16T19_05_37.712893", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-12-16T19-05-37.712893.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-12-16T19-05-37.712893.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_12_16T19_05_37.712893", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T19-05-37.712893.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T19-05-37.712893.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_12_16T19_05_37.712893", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-12-16T19-05-37.712893.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-12-16T19-05-37.712893.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_12_16T19_05_37.712893", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T19-05-37.712893.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T19-05-37.712893.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_12_16T19_05_37.712893", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T19-05-37.712893.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T19-05-37.712893.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_12_16T19_05_37.712893", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T19-05-37.712893.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T19-05-37.712893.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_12_16T19_05_37.712893", "path": ["**/details_harness|hendrycksTest-management|5_2023-12-16T19-05-37.712893.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-12-16T19-05-37.712893.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_12_16T19_05_37.712893", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-12-16T19-05-37.712893.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-12-16T19-05-37.712893.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_12_16T19_05_37.712893", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T19-05-37.712893.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T19-05-37.712893.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_12_16T19_05_37.712893", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T19-05-37.712893.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T19-05-37.712893.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_12_16T19_05_37.712893", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T19-05-37.712893.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T19-05-37.712893.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_12_16T19_05_37.712893", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T19-05-37.712893.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T19-05-37.712893.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_12_16T19_05_37.712893", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-12-16T19-05-37.712893.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-12-16T19-05-37.712893.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_12_16T19_05_37.712893", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-12-16T19-05-37.712893.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-12-16T19-05-37.712893.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_12_16T19_05_37.712893", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-12-16T19-05-37.712893.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-12-16T19-05-37.712893.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_12_16T19_05_37.712893", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T19-05-37.712893.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T19-05-37.712893.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_12_16T19_05_37.712893", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-12-16T19-05-37.712893.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2023-12-16T19-05-37.712893.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_12_16T19_05_37.712893", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T19-05-37.712893.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T19-05-37.712893.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_12_16T19_05_37.712893", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T19-05-37.712893.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T19-05-37.712893.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_12_16T19_05_37.712893", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-12-16T19-05-37.712893.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-12-16T19-05-37.712893.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_12_16T19_05_37.712893", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-12-16T19-05-37.712893.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-12-16T19-05-37.712893.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_12_16T19_05_37.712893", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-12-16T19-05-37.712893.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-12-16T19-05-37.712893.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_12_16T19_05_37.712893", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T19-05-37.712893.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T19-05-37.712893.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_12_16T19_05_37.712893", "path": ["**/details_harness|hendrycksTest-virology|5_2023-12-16T19-05-37.712893.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-12-16T19-05-37.712893.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_12_16T19_05_37.712893", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-12-16T19-05-37.712893.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-12-16T19-05-37.712893.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_12_16T19_05_37.712893", "path": ["**/details_harness|truthfulqa:mc|0_2023-12-16T19-05-37.712893.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-12-16T19-05-37.712893.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_12_16T19_05_37.712893", "path": ["**/details_harness|winogrande|5_2023-12-16T19-05-37.712893.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-12-16T19-05-37.712893.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_12_16T19_05_37.712893", "path": ["results_2023-12-16T19-05-37.712893.parquet"]}, {"split": "latest", "path": 
["results_2023-12-16T19-05-37.712893.parquet"]}]}]}
2023-12-16T19:09:12+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of maywell/PiVoT-10.7B-Mistral-v0.2 Dataset automatically created during the evaluation run of model maywell/PiVoT-10.7B-Mistral-v0.2 on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following (see the snippet sketched after this card text): ## Latest results These are the latest results from run 2023-12-16T19:05:37.712893 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
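A minimal sketch of the loading snippet referenced in the card above, assuming the same `open-llm-leaderboard/details_<org>__<model>` repository naming as the other runs in this dump:

```python
from datasets import load_dataset

# Assumed repository id for the maywell/PiVoT-10.7B-Mistral-v0.2 evaluation details.
# "latest" is the split name declared in this run's configuration listing and
# always points at the most recent results for that configuration.
data = load_dataset(
    "open-llm-leaderboard/details_maywell__PiVoT-10.7B-Mistral-v0.2",
    "harness_winogrande_5",
    split="latest",
)
```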
[ "# Dataset Card for Evaluation run of maywell/PiVoT-10.7B-Mistral-v0.2\n\n\n\nDataset automatically created during the evaluation run of model maywell/PiVoT-10.7B-Mistral-v0.2 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-12-16T19:05:37.712893(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of maywell/PiVoT-10.7B-Mistral-v0.2\n\n\n\nDataset automatically created during the evaluation run of model maywell/PiVoT-10.7B-Mistral-v0.2 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-12-16T19:05:37.712893(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 193, 67, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of maywell/PiVoT-10.7B-Mistral-v0.2\n\n\n\nDataset automatically created during the evaluation run of model maywell/PiVoT-10.7B-Mistral-v0.2 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-12-16T19:05:37.712893(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]## Dataset Card Authors [optional]" ]
133d8a60274213849d7c832004ce55b3b4bc553b
# Dataset Card for Evaluation run of Locutusque/Orca-2-13b-SFT_v5 <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [Locutusque/Orca-2-13b-SFT_v5](https://huggingface.co/Locutusque/Orca-2-13b-SFT_v5) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_Locutusque__Orca-2-13b-SFT_v5", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-12-16T19:07:23.375645](https://huggingface.co/datasets/open-llm-leaderboard/details_Locutusque__Orca-2-13b-SFT_v5/blob/main/results_2023-12-16T19-07-23.375645.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.5961306056367279, "acc_stderr": 0.03296294817457722, "acc_norm": 0.6051075721265253, "acc_norm_stderr": 0.03374567474717863, "mc1": 0.36474908200734396, "mc1_stderr": 0.01685096106172012, "mc2": 0.5183588707836261, "mc2_stderr": 0.0149463233822155 }, "harness|arc:challenge|25": { "acc": 0.5546075085324232, "acc_stderr": 0.014523987638344078, "acc_norm": 0.5921501706484642, "acc_norm_stderr": 0.014361097288449708 }, "harness|hellaswag|10": { "acc": 0.6079466241784505, "acc_stderr": 0.004872107262082463, "acc_norm": 0.8009360685122485, "acc_norm_stderr": 0.003984801854418762 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.34, "acc_stderr": 0.04760952285695236, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695236 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.6518518518518519, "acc_stderr": 0.041153246103369526, "acc_norm": 0.6518518518518519, "acc_norm_stderr": 0.041153246103369526 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.756578947368421, "acc_stderr": 0.034923496688842384, "acc_norm": 0.756578947368421, "acc_norm_stderr": 0.034923496688842384 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.7, "acc_stderr": 0.046056618647183814, "acc_norm": 0.7, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.6188679245283019, "acc_stderr": 0.029890609686286634, "acc_norm": 0.6188679245283019, "acc_norm_stderr": 0.029890609686286634 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.6805555555555556, "acc_stderr": 0.038990736873573344, "acc_norm": 0.6805555555555556, "acc_norm_stderr": 0.038990736873573344 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.4, "acc_stderr": 0.04923659639173309, "acc_norm": 0.4, "acc_norm_stderr": 0.04923659639173309 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.43, "acc_stderr": 0.04975698519562428, "acc_norm": 0.43, "acc_norm_stderr": 0.04975698519562428 }, 
"harness|hendrycksTest-college_mathematics|5": { "acc": 0.39, "acc_stderr": 0.04902071300001975, "acc_norm": 0.39, "acc_norm_stderr": 0.04902071300001975 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.5375722543352601, "acc_stderr": 0.0380168510452446, "acc_norm": 0.5375722543352601, "acc_norm_stderr": 0.0380168510452446 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.3333333333333333, "acc_stderr": 0.04690650298201943, "acc_norm": 0.3333333333333333, "acc_norm_stderr": 0.04690650298201943 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.66, "acc_stderr": 0.04760952285695237, "acc_norm": 0.66, "acc_norm_stderr": 0.04760952285695237 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.4851063829787234, "acc_stderr": 0.03267151848924777, "acc_norm": 0.4851063829787234, "acc_norm_stderr": 0.03267151848924777 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.3157894736842105, "acc_stderr": 0.04372748290278007, "acc_norm": 0.3157894736842105, "acc_norm_stderr": 0.04372748290278007 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5724137931034483, "acc_stderr": 0.041227371113703316, "acc_norm": 0.5724137931034483, "acc_norm_stderr": 0.041227371113703316 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.36507936507936506, "acc_stderr": 0.024796060602699958, "acc_norm": 0.36507936507936506, "acc_norm_stderr": 0.024796060602699958 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.31746031746031744, "acc_stderr": 0.04163453031302859, "acc_norm": 0.31746031746031744, "acc_norm_stderr": 0.04163453031302859 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.43, "acc_stderr": 0.049756985195624284, "acc_norm": 0.43, "acc_norm_stderr": 0.049756985195624284 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.7161290322580646, "acc_stderr": 0.02564938106302926, "acc_norm": 0.7161290322580646, "acc_norm_stderr": 0.02564938106302926 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.4827586206896552, "acc_stderr": 0.035158955511656986, "acc_norm": 0.4827586206896552, "acc_norm_stderr": 0.035158955511656986 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.65, "acc_stderr": 0.04793724854411019, "acc_norm": 0.65, "acc_norm_stderr": 0.04793724854411019 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.7515151515151515, "acc_stderr": 0.03374402644139404, "acc_norm": 0.7515151515151515, "acc_norm_stderr": 0.03374402644139404 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.7525252525252525, "acc_stderr": 0.030746300742124495, "acc_norm": 0.7525252525252525, "acc_norm_stderr": 0.030746300742124495 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.8393782383419689, "acc_stderr": 0.02649905770139744, "acc_norm": 0.8393782383419689, "acc_norm_stderr": 0.02649905770139744 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.5666666666666667, "acc_stderr": 0.02512465352588513, "acc_norm": 0.5666666666666667, "acc_norm_stderr": 0.02512465352588513 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.3296296296296296, "acc_stderr": 0.028661201116524586, "acc_norm": 0.3296296296296296, "acc_norm_stderr": 0.028661201116524586 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.6134453781512605, "acc_stderr": 0.03163145807552378, "acc_norm": 0.6134453781512605, "acc_norm_stderr": 0.03163145807552378 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.37748344370860926, "acc_stderr": 
0.0395802723112157, "acc_norm": 0.37748344370860926, "acc_norm_stderr": 0.0395802723112157 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.7871559633027523, "acc_stderr": 0.017549376389313694, "acc_norm": 0.7871559633027523, "acc_norm_stderr": 0.017549376389313694 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.49074074074074076, "acc_stderr": 0.034093869469927006, "acc_norm": 0.49074074074074076, "acc_norm_stderr": 0.034093869469927006 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.7941176470588235, "acc_stderr": 0.028379449451588663, "acc_norm": 0.7941176470588235, "acc_norm_stderr": 0.028379449451588663 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.8143459915611815, "acc_stderr": 0.025310495376944853, "acc_norm": 0.8143459915611815, "acc_norm_stderr": 0.025310495376944853 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.6591928251121076, "acc_stderr": 0.031811497470553604, "acc_norm": 0.6591928251121076, "acc_norm_stderr": 0.031811497470553604 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.7175572519083969, "acc_stderr": 0.03948406125768361, "acc_norm": 0.7175572519083969, "acc_norm_stderr": 0.03948406125768361 }, "harness|hendrycksTest-international_law|5": { "acc": 0.7768595041322314, "acc_stderr": 0.03800754475228732, "acc_norm": 0.7768595041322314, "acc_norm_stderr": 0.03800754475228732 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.7685185185185185, "acc_stderr": 0.04077494709252627, "acc_norm": 0.7685185185185185, "acc_norm_stderr": 0.04077494709252627 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.7055214723926381, "acc_stderr": 0.03581165790474082, "acc_norm": 0.7055214723926381, "acc_norm_stderr": 0.03581165790474082 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.39285714285714285, "acc_stderr": 0.04635550135609976, "acc_norm": 0.39285714285714285, "acc_norm_stderr": 0.04635550135609976 }, "harness|hendrycksTest-management|5": { "acc": 0.7378640776699029, "acc_stderr": 0.04354631077260597, "acc_norm": 0.7378640776699029, "acc_norm_stderr": 0.04354631077260597 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8547008547008547, "acc_stderr": 0.023086635086841407, "acc_norm": 0.8547008547008547, "acc_norm_stderr": 0.023086635086841407 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.58, "acc_stderr": 0.049604496374885836, "acc_norm": 0.58, "acc_norm_stderr": 0.049604496374885836 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.7790549169859514, "acc_stderr": 0.01483620516733357, "acc_norm": 0.7790549169859514, "acc_norm_stderr": 0.01483620516733357 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.6936416184971098, "acc_stderr": 0.024818350129436593, "acc_norm": 0.6936416184971098, "acc_norm_stderr": 0.024818350129436593 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.32849162011173183, "acc_stderr": 0.01570793539849646, "acc_norm": 0.32849162011173183, "acc_norm_stderr": 0.01570793539849646 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.6830065359477124, "acc_stderr": 0.02664327847450875, "acc_norm": 0.6830065359477124, "acc_norm_stderr": 0.02664327847450875 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.6720257234726688, "acc_stderr": 0.026664410886937624, "acc_norm": 0.6720257234726688, "acc_norm_stderr": 0.026664410886937624 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.7253086419753086, "acc_stderr": 0.024836057868294677, "acc_norm": 0.7253086419753086, "acc_norm_stderr": 0.024836057868294677 }, 
"harness|hendrycksTest-professional_accounting|5": { "acc": 0.44680851063829785, "acc_stderr": 0.029658235097666904, "acc_norm": 0.44680851063829785, "acc_norm_stderr": 0.029658235097666904 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.4315514993481095, "acc_stderr": 0.012650007999463878, "acc_norm": 0.4315514993481095, "acc_norm_stderr": 0.012650007999463878 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.5772058823529411, "acc_stderr": 0.030008562845003476, "acc_norm": 0.5772058823529411, "acc_norm_stderr": 0.030008562845003476 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.6094771241830066, "acc_stderr": 0.0197370089980946, "acc_norm": 0.6094771241830066, "acc_norm_stderr": 0.0197370089980946 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6454545454545455, "acc_stderr": 0.045820048415054174, "acc_norm": 0.6454545454545455, "acc_norm_stderr": 0.045820048415054174 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.710204081632653, "acc_stderr": 0.02904308868330434, "acc_norm": 0.710204081632653, "acc_norm_stderr": 0.02904308868330434 }, "harness|hendrycksTest-sociology|5": { "acc": 0.7313432835820896, "acc_stderr": 0.03134328358208954, "acc_norm": 0.7313432835820896, "acc_norm_stderr": 0.03134328358208954 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.8, "acc_stderr": 0.04020151261036846, "acc_norm": 0.8, "acc_norm_stderr": 0.04020151261036846 }, "harness|hendrycksTest-virology|5": { "acc": 0.5240963855421686, "acc_stderr": 0.03887971849597264, "acc_norm": 0.5240963855421686, "acc_norm_stderr": 0.03887971849597264 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8070175438596491, "acc_stderr": 0.030267457554898458, "acc_norm": 0.8070175438596491, "acc_norm_stderr": 0.030267457554898458 }, "harness|truthfulqa:mc|0": { "mc1": 0.36474908200734396, "mc1_stderr": 0.01685096106172012, "mc2": 0.5183588707836261, "mc2_stderr": 0.0149463233822155 }, "harness|winogrande|5": { "acc": 0.8089976322020521, "acc_stderr": 0.011047808761510427 }, "harness|gsm8k|5": { "acc": 0.0841546626231994, "acc_stderr": 0.0076470240466032045 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. 
news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
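The aggregated numbers quoted in the card above are also exposed as the "results" configuration of this details dataset. A minimal sketch of loading them, assuming the split naming follows the same pattern as the other runs in this dump (a timestamped split plus "latest"); the exact field layout is not guaranteed and should be inspected before use:

```python
from datasets import load_dataset

# Repository named in the card above; the "results" configuration holds the aggregated metrics.
results = load_dataset(
    "open-llm-leaderboard/details_Locutusque__Orca-2-13b-SFT_v5",
    "results",
    split="latest",
)
# One row per run; check the available fields before relying on any of them.
print(results.column_names)
```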
open-llm-leaderboard/details_Locutusque__Orca-2-13b-SFT_v5
[ "region:us" ]
2023-12-16T19:10:26+00:00
{"pretty_name": "Evaluation run of Locutusque/Orca-2-13b-SFT_v5", "dataset_summary": "Dataset automatically created during the evaluation run of model [Locutusque/Orca-2-13b-SFT_v5](https://huggingface.co/Locutusque/Orca-2-13b-SFT_v5) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_Locutusque__Orca-2-13b-SFT_v5\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-12-16T19:07:23.375645](https://huggingface.co/datasets/open-llm-leaderboard/details_Locutusque__Orca-2-13b-SFT_v5/blob/main/results_2023-12-16T19-07-23.375645.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.5961306056367279,\n \"acc_stderr\": 0.03296294817457722,\n \"acc_norm\": 0.6051075721265253,\n \"acc_norm_stderr\": 0.03374567474717863,\n \"mc1\": 0.36474908200734396,\n \"mc1_stderr\": 0.01685096106172012,\n \"mc2\": 0.5183588707836261,\n \"mc2_stderr\": 0.0149463233822155\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.5546075085324232,\n \"acc_stderr\": 0.014523987638344078,\n \"acc_norm\": 0.5921501706484642,\n \"acc_norm_stderr\": 0.014361097288449708\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6079466241784505,\n \"acc_stderr\": 0.004872107262082463,\n \"acc_norm\": 0.8009360685122485,\n \"acc_norm_stderr\": 0.003984801854418762\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.34,\n \"acc_stderr\": 0.04760952285695236,\n \"acc_norm\": 0.34,\n \"acc_norm_stderr\": 0.04760952285695236\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6518518518518519,\n \"acc_stderr\": 0.041153246103369526,\n \"acc_norm\": 0.6518518518518519,\n \"acc_norm_stderr\": 0.041153246103369526\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.756578947368421,\n \"acc_stderr\": 0.034923496688842384,\n \"acc_norm\": 0.756578947368421,\n \"acc_norm_stderr\": 0.034923496688842384\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.7,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.7,\n \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.6188679245283019,\n \"acc_stderr\": 0.029890609686286634,\n \"acc_norm\": 0.6188679245283019,\n \"acc_norm_stderr\": 0.029890609686286634\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.6805555555555556,\n \"acc_stderr\": 0.038990736873573344,\n \"acc_norm\": 0.6805555555555556,\n \"acc_norm_stderr\": 0.038990736873573344\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.4,\n \"acc_stderr\": 0.04923659639173309,\n \"acc_norm\": 
0.4,\n \"acc_norm_stderr\": 0.04923659639173309\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.43,\n \"acc_stderr\": 0.04975698519562428,\n \"acc_norm\": 0.43,\n \"acc_norm_stderr\": 0.04975698519562428\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.39,\n \"acc_stderr\": 0.04902071300001975,\n \"acc_norm\": 0.39,\n \"acc_norm_stderr\": 0.04902071300001975\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.5375722543352601,\n \"acc_stderr\": 0.0380168510452446,\n \"acc_norm\": 0.5375722543352601,\n \"acc_norm_stderr\": 0.0380168510452446\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.3333333333333333,\n \"acc_stderr\": 0.04690650298201943,\n \"acc_norm\": 0.3333333333333333,\n \"acc_norm_stderr\": 0.04690650298201943\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.66,\n \"acc_stderr\": 0.04760952285695237,\n \"acc_norm\": 0.66,\n \"acc_norm_stderr\": 0.04760952285695237\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.4851063829787234,\n \"acc_stderr\": 0.03267151848924777,\n \"acc_norm\": 0.4851063829787234,\n \"acc_norm_stderr\": 0.03267151848924777\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.3157894736842105,\n \"acc_stderr\": 0.04372748290278007,\n \"acc_norm\": 0.3157894736842105,\n \"acc_norm_stderr\": 0.04372748290278007\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5724137931034483,\n \"acc_stderr\": 0.041227371113703316,\n \"acc_norm\": 0.5724137931034483,\n \"acc_norm_stderr\": 0.041227371113703316\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.36507936507936506,\n \"acc_stderr\": 0.024796060602699958,\n \"acc_norm\": 0.36507936507936506,\n \"acc_norm_stderr\": 0.024796060602699958\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.31746031746031744,\n \"acc_stderr\": 0.04163453031302859,\n \"acc_norm\": 0.31746031746031744,\n \"acc_norm_stderr\": 0.04163453031302859\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.43,\n \"acc_stderr\": 0.049756985195624284,\n \"acc_norm\": 0.43,\n \"acc_norm_stderr\": 0.049756985195624284\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7161290322580646,\n \"acc_stderr\": 0.02564938106302926,\n \"acc_norm\": 0.7161290322580646,\n \"acc_norm_stderr\": 0.02564938106302926\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.4827586206896552,\n \"acc_stderr\": 0.035158955511656986,\n \"acc_norm\": 0.4827586206896552,\n \"acc_norm_stderr\": 0.035158955511656986\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.65,\n \"acc_stderr\": 0.04793724854411019,\n \"acc_norm\": 0.65,\n \"acc_norm_stderr\": 0.04793724854411019\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7515151515151515,\n \"acc_stderr\": 0.03374402644139404,\n \"acc_norm\": 0.7515151515151515,\n \"acc_norm_stderr\": 0.03374402644139404\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.7525252525252525,\n \"acc_stderr\": 0.030746300742124495,\n \"acc_norm\": 0.7525252525252525,\n \"acc_norm_stderr\": 0.030746300742124495\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.8393782383419689,\n \"acc_stderr\": 0.02649905770139744,\n \"acc_norm\": 0.8393782383419689,\n \"acc_norm_stderr\": 0.02649905770139744\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.5666666666666667,\n 
\"acc_stderr\": 0.02512465352588513,\n \"acc_norm\": 0.5666666666666667,\n \"acc_norm_stderr\": 0.02512465352588513\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.3296296296296296,\n \"acc_stderr\": 0.028661201116524586,\n \"acc_norm\": 0.3296296296296296,\n \"acc_norm_stderr\": 0.028661201116524586\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.6134453781512605,\n \"acc_stderr\": 0.03163145807552378,\n \"acc_norm\": 0.6134453781512605,\n \"acc_norm_stderr\": 0.03163145807552378\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.37748344370860926,\n \"acc_stderr\": 0.0395802723112157,\n \"acc_norm\": 0.37748344370860926,\n \"acc_norm_stderr\": 0.0395802723112157\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.7871559633027523,\n \"acc_stderr\": 0.017549376389313694,\n \"acc_norm\": 0.7871559633027523,\n \"acc_norm_stderr\": 0.017549376389313694\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.49074074074074076,\n \"acc_stderr\": 0.034093869469927006,\n \"acc_norm\": 0.49074074074074076,\n \"acc_norm_stderr\": 0.034093869469927006\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.7941176470588235,\n \"acc_stderr\": 0.028379449451588663,\n \"acc_norm\": 0.7941176470588235,\n \"acc_norm_stderr\": 0.028379449451588663\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.8143459915611815,\n \"acc_stderr\": 0.025310495376944853,\n \"acc_norm\": 0.8143459915611815,\n \"acc_norm_stderr\": 0.025310495376944853\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6591928251121076,\n \"acc_stderr\": 0.031811497470553604,\n \"acc_norm\": 0.6591928251121076,\n \"acc_norm_stderr\": 0.031811497470553604\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.7175572519083969,\n \"acc_stderr\": 0.03948406125768361,\n \"acc_norm\": 0.7175572519083969,\n \"acc_norm_stderr\": 0.03948406125768361\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.7768595041322314,\n \"acc_stderr\": 0.03800754475228732,\n \"acc_norm\": 0.7768595041322314,\n \"acc_norm_stderr\": 0.03800754475228732\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7685185185185185,\n \"acc_stderr\": 0.04077494709252627,\n \"acc_norm\": 0.7685185185185185,\n \"acc_norm_stderr\": 0.04077494709252627\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7055214723926381,\n \"acc_stderr\": 0.03581165790474082,\n \"acc_norm\": 0.7055214723926381,\n \"acc_norm_stderr\": 0.03581165790474082\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.39285714285714285,\n \"acc_stderr\": 0.04635550135609976,\n \"acc_norm\": 0.39285714285714285,\n \"acc_norm_stderr\": 0.04635550135609976\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7378640776699029,\n \"acc_stderr\": 0.04354631077260597,\n \"acc_norm\": 0.7378640776699029,\n \"acc_norm_stderr\": 0.04354631077260597\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8547008547008547,\n \"acc_stderr\": 0.023086635086841407,\n \"acc_norm\": 0.8547008547008547,\n \"acc_norm_stderr\": 0.023086635086841407\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.58,\n \"acc_stderr\": 0.049604496374885836,\n \"acc_norm\": 0.58,\n \"acc_norm_stderr\": 0.049604496374885836\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.7790549169859514,\n \"acc_stderr\": 0.01483620516733357,\n \"acc_norm\": 
0.7790549169859514,\n \"acc_norm_stderr\": 0.01483620516733357\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.6936416184971098,\n \"acc_stderr\": 0.024818350129436593,\n \"acc_norm\": 0.6936416184971098,\n \"acc_norm_stderr\": 0.024818350129436593\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.32849162011173183,\n \"acc_stderr\": 0.01570793539849646,\n \"acc_norm\": 0.32849162011173183,\n \"acc_norm_stderr\": 0.01570793539849646\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.6830065359477124,\n \"acc_stderr\": 0.02664327847450875,\n \"acc_norm\": 0.6830065359477124,\n \"acc_norm_stderr\": 0.02664327847450875\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.6720257234726688,\n \"acc_stderr\": 0.026664410886937624,\n \"acc_norm\": 0.6720257234726688,\n \"acc_norm_stderr\": 0.026664410886937624\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.7253086419753086,\n \"acc_stderr\": 0.024836057868294677,\n \"acc_norm\": 0.7253086419753086,\n \"acc_norm_stderr\": 0.024836057868294677\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.44680851063829785,\n \"acc_stderr\": 0.029658235097666904,\n \"acc_norm\": 0.44680851063829785,\n \"acc_norm_stderr\": 0.029658235097666904\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.4315514993481095,\n \"acc_stderr\": 0.012650007999463878,\n \"acc_norm\": 0.4315514993481095,\n \"acc_norm_stderr\": 0.012650007999463878\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.5772058823529411,\n \"acc_stderr\": 0.030008562845003476,\n \"acc_norm\": 0.5772058823529411,\n \"acc_norm_stderr\": 0.030008562845003476\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6094771241830066,\n \"acc_stderr\": 0.0197370089980946,\n \"acc_norm\": 0.6094771241830066,\n \"acc_norm_stderr\": 0.0197370089980946\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6454545454545455,\n \"acc_stderr\": 0.045820048415054174,\n \"acc_norm\": 0.6454545454545455,\n \"acc_norm_stderr\": 0.045820048415054174\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.710204081632653,\n \"acc_stderr\": 0.02904308868330434,\n \"acc_norm\": 0.710204081632653,\n \"acc_norm_stderr\": 0.02904308868330434\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.7313432835820896,\n \"acc_stderr\": 0.03134328358208954,\n \"acc_norm\": 0.7313432835820896,\n \"acc_norm_stderr\": 0.03134328358208954\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.8,\n \"acc_stderr\": 0.04020151261036846,\n \"acc_norm\": 0.8,\n \"acc_norm_stderr\": 0.04020151261036846\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5240963855421686,\n \"acc_stderr\": 0.03887971849597264,\n \"acc_norm\": 0.5240963855421686,\n \"acc_norm_stderr\": 0.03887971849597264\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8070175438596491,\n \"acc_stderr\": 0.030267457554898458,\n \"acc_norm\": 0.8070175438596491,\n \"acc_norm_stderr\": 0.030267457554898458\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.36474908200734396,\n \"mc1_stderr\": 0.01685096106172012,\n \"mc2\": 0.5183588707836261,\n \"mc2_stderr\": 0.0149463233822155\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.8089976322020521,\n \"acc_stderr\": 0.011047808761510427\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.0841546626231994,\n \"acc_stderr\": 0.0076470240466032045\n }\n}\n```", "repo_url": "https://huggingface.co/Locutusque/Orca-2-13b-SFT_v5", 
"leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_12_16T19_07_23.375645", "path": ["**/details_harness|arc:challenge|25_2023-12-16T19-07-23.375645.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-12-16T19-07-23.375645.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_12_16T19_07_23.375645", "path": ["**/details_harness|gsm8k|5_2023-12-16T19-07-23.375645.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-12-16T19-07-23.375645.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_12_16T19_07_23.375645", "path": ["**/details_harness|hellaswag|10_2023-12-16T19-07-23.375645.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-12-16T19-07-23.375645.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_12_16T19_07_23.375645", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T19-07-23.375645.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-12-16T19-07-23.375645.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-12-16T19-07-23.375645.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T19-07-23.375645.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T19-07-23.375645.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-12-16T19-07-23.375645.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T19-07-23.375645.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T19-07-23.375645.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T19-07-23.375645.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T19-07-23.375645.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-12-16T19-07-23.375645.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-12-16T19-07-23.375645.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T19-07-23.375645.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-12-16T19-07-23.375645.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T19-07-23.375645.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T19-07-23.375645.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T19-07-23.375645.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-12-16T19-07-23.375645.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T19-07-23.375645.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T19-07-23.375645.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T19-07-23.375645.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T19-07-23.375645.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T19-07-23.375645.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T19-07-23.375645.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T19-07-23.375645.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T19-07-23.375645.parquet", 
"**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T19-07-23.375645.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T19-07-23.375645.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T19-07-23.375645.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T19-07-23.375645.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T19-07-23.375645.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T19-07-23.375645.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-12-16T19-07-23.375645.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T19-07-23.375645.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-12-16T19-07-23.375645.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T19-07-23.375645.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T19-07-23.375645.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T19-07-23.375645.parquet", "**/details_harness|hendrycksTest-management|5_2023-12-16T19-07-23.375645.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-12-16T19-07-23.375645.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T19-07-23.375645.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T19-07-23.375645.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T19-07-23.375645.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T19-07-23.375645.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-12-16T19-07-23.375645.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-12-16T19-07-23.375645.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-12-16T19-07-23.375645.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T19-07-23.375645.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-12-16T19-07-23.375645.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T19-07-23.375645.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T19-07-23.375645.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-12-16T19-07-23.375645.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-12-16T19-07-23.375645.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-12-16T19-07-23.375645.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T19-07-23.375645.parquet", "**/details_harness|hendrycksTest-virology|5_2023-12-16T19-07-23.375645.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-12-16T19-07-23.375645.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T19-07-23.375645.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-12-16T19-07-23.375645.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-12-16T19-07-23.375645.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T19-07-23.375645.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T19-07-23.375645.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-12-16T19-07-23.375645.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T19-07-23.375645.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T19-07-23.375645.parquet", 
"**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T19-07-23.375645.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T19-07-23.375645.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-12-16T19-07-23.375645.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-12-16T19-07-23.375645.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T19-07-23.375645.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-12-16T19-07-23.375645.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T19-07-23.375645.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T19-07-23.375645.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T19-07-23.375645.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-12-16T19-07-23.375645.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T19-07-23.375645.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T19-07-23.375645.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T19-07-23.375645.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T19-07-23.375645.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T19-07-23.375645.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T19-07-23.375645.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T19-07-23.375645.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T19-07-23.375645.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T19-07-23.375645.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T19-07-23.375645.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T19-07-23.375645.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T19-07-23.375645.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T19-07-23.375645.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T19-07-23.375645.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-12-16T19-07-23.375645.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T19-07-23.375645.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-12-16T19-07-23.375645.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T19-07-23.375645.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T19-07-23.375645.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T19-07-23.375645.parquet", "**/details_harness|hendrycksTest-management|5_2023-12-16T19-07-23.375645.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-12-16T19-07-23.375645.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T19-07-23.375645.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T19-07-23.375645.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T19-07-23.375645.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T19-07-23.375645.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-12-16T19-07-23.375645.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-12-16T19-07-23.375645.parquet", 
"**/details_harness|hendrycksTest-prehistory|5_2023-12-16T19-07-23.375645.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T19-07-23.375645.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-12-16T19-07-23.375645.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T19-07-23.375645.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T19-07-23.375645.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-12-16T19-07-23.375645.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-12-16T19-07-23.375645.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-12-16T19-07-23.375645.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T19-07-23.375645.parquet", "**/details_harness|hendrycksTest-virology|5_2023-12-16T19-07-23.375645.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-12-16T19-07-23.375645.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_12_16T19_07_23.375645", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T19-07-23.375645.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T19-07-23.375645.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_12_16T19_07_23.375645", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-12-16T19-07-23.375645.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-12-16T19-07-23.375645.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_12_16T19_07_23.375645", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-12-16T19-07-23.375645.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-12-16T19-07-23.375645.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_12_16T19_07_23.375645", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T19-07-23.375645.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T19-07-23.375645.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_12_16T19_07_23.375645", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T19-07-23.375645.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T19-07-23.375645.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_12_16T19_07_23.375645", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-12-16T19-07-23.375645.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-12-16T19-07-23.375645.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_12_16T19_07_23.375645", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T19-07-23.375645.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T19-07-23.375645.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_12_16T19_07_23.375645", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T19-07-23.375645.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T19-07-23.375645.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_12_16T19_07_23.375645", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T19-07-23.375645.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T19-07-23.375645.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_12_16T19_07_23.375645", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T19-07-23.375645.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T19-07-23.375645.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_12_16T19_07_23.375645", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-12-16T19-07-23.375645.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-12-16T19-07-23.375645.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_12_16T19_07_23.375645", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-12-16T19-07-23.375645.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-12-16T19-07-23.375645.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_12_16T19_07_23.375645", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T19-07-23.375645.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T19-07-23.375645.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_12_16T19_07_23.375645", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-12-16T19-07-23.375645.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-12-16T19-07-23.375645.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_12_16T19_07_23.375645", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T19-07-23.375645.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T19-07-23.375645.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_12_16T19_07_23.375645", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T19-07-23.375645.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T19-07-23.375645.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_12_16T19_07_23.375645", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T19-07-23.375645.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T19-07-23.375645.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_12_16T19_07_23.375645", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-12-16T19-07-23.375645.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-12-16T19-07-23.375645.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_12_16T19_07_23.375645", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T19-07-23.375645.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T19-07-23.375645.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_12_16T19_07_23.375645", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T19-07-23.375645.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T19-07-23.375645.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_12_16T19_07_23.375645", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T19-07-23.375645.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T19-07-23.375645.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_12_16T19_07_23.375645", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T19-07-23.375645.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T19-07-23.375645.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_12_16T19_07_23.375645", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T19-07-23.375645.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T19-07-23.375645.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_12_16T19_07_23.375645", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T19-07-23.375645.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T19-07-23.375645.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_12_16T19_07_23.375645", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T19-07-23.375645.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T19-07-23.375645.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_12_16T19_07_23.375645", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T19-07-23.375645.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T19-07-23.375645.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_12_16T19_07_23.375645", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T19-07-23.375645.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T19-07-23.375645.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_12_16T19_07_23.375645", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T19-07-23.375645.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T19-07-23.375645.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_12_16T19_07_23.375645", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T19-07-23.375645.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T19-07-23.375645.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_12_16T19_07_23.375645", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T19-07-23.375645.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T19-07-23.375645.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_12_16T19_07_23.375645", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T19-07-23.375645.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T19-07-23.375645.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_12_16T19_07_23.375645", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T19-07-23.375645.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T19-07-23.375645.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_12_16T19_07_23.375645", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-12-16T19-07-23.375645.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-12-16T19-07-23.375645.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_12_16T19_07_23.375645", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T19-07-23.375645.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T19-07-23.375645.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_12_16T19_07_23.375645", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-12-16T19-07-23.375645.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-12-16T19-07-23.375645.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_12_16T19_07_23.375645", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T19-07-23.375645.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T19-07-23.375645.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_12_16T19_07_23.375645", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T19-07-23.375645.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T19-07-23.375645.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_12_16T19_07_23.375645", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T19-07-23.375645.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T19-07-23.375645.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_12_16T19_07_23.375645", "path": ["**/details_harness|hendrycksTest-management|5_2023-12-16T19-07-23.375645.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-12-16T19-07-23.375645.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_12_16T19_07_23.375645", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-12-16T19-07-23.375645.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-12-16T19-07-23.375645.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_12_16T19_07_23.375645", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T19-07-23.375645.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T19-07-23.375645.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_12_16T19_07_23.375645", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T19-07-23.375645.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T19-07-23.375645.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_12_16T19_07_23.375645", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T19-07-23.375645.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T19-07-23.375645.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_12_16T19_07_23.375645", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T19-07-23.375645.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T19-07-23.375645.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_12_16T19_07_23.375645", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-12-16T19-07-23.375645.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-12-16T19-07-23.375645.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_12_16T19_07_23.375645", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-12-16T19-07-23.375645.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-12-16T19-07-23.375645.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_12_16T19_07_23.375645", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-12-16T19-07-23.375645.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-12-16T19-07-23.375645.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_12_16T19_07_23.375645", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T19-07-23.375645.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T19-07-23.375645.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_12_16T19_07_23.375645", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-12-16T19-07-23.375645.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2023-12-16T19-07-23.375645.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_12_16T19_07_23.375645", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T19-07-23.375645.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T19-07-23.375645.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_12_16T19_07_23.375645", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T19-07-23.375645.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T19-07-23.375645.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_12_16T19_07_23.375645", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-12-16T19-07-23.375645.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-12-16T19-07-23.375645.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_12_16T19_07_23.375645", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-12-16T19-07-23.375645.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-12-16T19-07-23.375645.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_12_16T19_07_23.375645", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-12-16T19-07-23.375645.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-12-16T19-07-23.375645.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_12_16T19_07_23.375645", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T19-07-23.375645.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T19-07-23.375645.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_12_16T19_07_23.375645", "path": ["**/details_harness|hendrycksTest-virology|5_2023-12-16T19-07-23.375645.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-12-16T19-07-23.375645.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_12_16T19_07_23.375645", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-12-16T19-07-23.375645.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-12-16T19-07-23.375645.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_12_16T19_07_23.375645", "path": ["**/details_harness|truthfulqa:mc|0_2023-12-16T19-07-23.375645.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-12-16T19-07-23.375645.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_12_16T19_07_23.375645", "path": ["**/details_harness|winogrande|5_2023-12-16T19-07-23.375645.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-12-16T19-07-23.375645.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_12_16T19_07_23.375645", "path": ["results_2023-12-16T19-07-23.375645.parquet"]}, {"split": "latest", "path": 
["results_2023-12-16T19-07-23.375645.parquet"]}]}]}
2023-12-16T19:11:07+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of Locutusque/Orca-2-13b-SFT_v5 Dataset automatically created during the evaluation run of model Locutusque/Orca-2-13b-SFT_v5 on the Open LLM Leaderboard. The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-12-16T19:07:23.375645(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
[ "# Dataset Card for Evaluation run of Locutusque/Orca-2-13b-SFT_v5\n\n\n\nDataset automatically created during the evaluation run of model Locutusque/Orca-2-13b-SFT_v5 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-12-16T19:07:23.375645(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of Locutusque/Orca-2-13b-SFT_v5\n\n\n\nDataset automatically created during the evaluation run of model Locutusque/Orca-2-13b-SFT_v5 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-12-16T19:07:23.375645(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 191, 66, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of Locutusque/Orca-2-13b-SFT_v5\n\n\n\nDataset automatically created during the evaluation run of model Locutusque/Orca-2-13b-SFT_v5 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-12-16T19:07:23.375645(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]## Dataset Card Authors [optional]" ]
c2670dad493d42a3d1a0c9d63f309af7436ff71a
# DialogSum Enhanced Dataset ## Overview DialogSum Enhanced is an extension of the original DialogSum dataset, enriched with a new column called 'Relevant Sentences.' This dataset is designed to facilitate research in dialogue summarization by providing additional information about the dialogue turns that GPT-4 considers relevant for generating summaries. ### Changes from DialogSum The primary enhancement in DialogSum Enhanced is the inclusion of the 'Relevant Sentences' column. This column contains the dialogue turns that GPT-4 identified as crucial for the generation of a summary. This information can be valuable for understanding the model's decision-making process and improving dialogue summarization models. ### Split Information - **Train Split:** The train split in DialogSum Enhanced consists of half of the original DialogSum train split. - **Test and Validation Sets:** The test and validation sets in DialogSum Enhanced retain their full length from the original DialogSum dataset. ## Dataset Structure The dataset is provided in a CSV format with the following columns: 1. **id:** Unique identifier for each dialogue. 2. **dialogue:** The sequential turns of the dialogue. 3. **relevant_sentences:** The dialogue turns that GPT-4 considered relevant for generating the summary. 4. **summary:** The reference summary for the dialogue. ## Usage Researchers and practitioners interested in dialogue summarization can leverage DialogSum Enhanced for training, validating, and testing their models. The 'Relevant Sentences' column provides additional insights into the model's decision-making process during summarization.
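Below is a minimal sketch of how such a dataset could be loaded and inspected with the Hugging Face `datasets` library; the default configuration, the `train` split name, and the exact column names are assumptions taken from this card rather than verified details of the published repository.

```python
from datasets import load_dataset

# Repository id as listed for this dataset card; the default configuration is
# assumed to expose the columns described above (id, dialogue,
# relevant_sentences, summary).
dataset = load_dataset("diogofouto/dialogsum-augmented")

# Inspect one training example: the dialogue, the turns GPT-4 marked as
# relevant, and the reference summary.
example = dataset["train"][0]
print(example["dialogue"])
print(example["relevant_sentences"])
print(example["summary"])
```

The `relevant_sentences` field can then be used, for example, as auxiliary supervision or for analyzing which dialogue turns drive the reference summaries.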
diogofouto/dialogsum-augmented
[ "license:apache-2.0", "region:us" ]
2023-12-16T19:11:04+00:00
{"license": "apache-2.0"}
2023-12-18T20:46:34+00:00
[]
[]
TAGS #license-apache-2.0 #region-us
# DialogSum Enhanced Dataset ## Overview DialogSum Enhanced is an extension of the original DialogSum dataset, enriched with a new column called 'Relevant Sentences.' This dataset is designed to facilitate research in dialogue summarization by providing additional information about the dialogue turns that GPT-4 considers relevant for generating summaries. ### Changes from DialogSum The primary enhancement in DialogSum Enhanced is the inclusion of the 'Relevant Sentences' column. This column contains the dialogue turns that GPT-4 identified as crucial for the generation of a summary. This information can be valuable for understanding the model's decision-making process and improving dialogue summarization models. ### Split Information - Train Split: The train split in DialogSum Enhanced consists of half of the original DialogSum train split. - Test and Validation Sets: The test and validation sets in DialogSum Enhanced retain their full length from the original DialogSum dataset. ## Dataset Structure The dataset is provided in a CSV format with the following columns: 1. id: Unique identifier for each dialogue. 2. dialogue: The sequential turns of the dialogue. 3. relevant_sentences: The dialogue turns that GPT-4 considered relevant for generating the summary. 4. summary: The reference summary for the dialogue. ## Usage Researchers and practitioners interested in dialogue summarization can leverage DialogSum Enhanced for training, validating, and testing their models. The 'Relevant Sentences' column provides additional insights into the model's decision-making process during summarization.
[ "# DialogSum Enhanced Dataset", "## Overview\n\nDialogSum Enhanced is an extension of the original DialogSum dataset, enriched with a new column called 'Relevant Sentences.' This dataset is designed to facilitate research in dialogue summarization by providing additional information about the dialogue turns that GPT-4 considers relevant for generating summaries.", "### Changes from DialogSum\n\nThe primary enhancement in DialogSum Enhanced is the inclusion of the 'Relevant Sentences' column. This column contains the dialogue turns that GPT-4 identified as crucial for the generation of a summary. This information can be valuable for understanding the model's decision-making process and improving dialogue summarization models.", "### Split Information\n\n- Train Split: The train split in DialogSum Enhanced consists of half of the original DialogSum train split.\n\n- Test and Validation Sets: The test and validation sets in DialogSum Enhanced retain their full length from the original DialogSum dataset.", "## Dataset Structure\n\nThe dataset is provided in a CSV format with the following columns:\n\n1. id: Unique identifier for each dialogue.\n2. dialogue: The sequential turns of the dialogue.\n3. relevant_sentences: The dialogue turns that GPT-4 considered relevant for generating the summary.\n4. summary: The reference summary for the dialogue.", "## Usage\n\nResearchers and practitioners interested in dialogue summarization can leverage DialogSum Enhanced for training, validating, and testing their models. The 'Relevant Sentences' column provides additional insights into the model's decision-making process during summarization." ]
[ "TAGS\n#license-apache-2.0 #region-us \n", "# DialogSum Enhanced Dataset", "## Overview\n\nDialogSum Enhanced is an extension of the original DialogSum dataset, enriched with a new column called 'Relevant Sentences.' This dataset is designed to facilitate research in dialogue summarization by providing additional information about the dialogue turns that GPT-4 considers relevant for generating summaries.", "### Changes from DialogSum\n\nThe primary enhancement in DialogSum Enhanced is the inclusion of the 'Relevant Sentences' column. This column contains the dialogue turns that GPT-4 identified as crucial for the generation of a summary. This information can be valuable for understanding the model's decision-making process and improving dialogue summarization models.", "### Split Information\n\n- Train Split: The train split in DialogSum Enhanced consists of half of the original DialogSum train split.\n\n- Test and Validation Sets: The test and validation sets in DialogSum Enhanced retain their full length from the original DialogSum dataset.", "## Dataset Structure\n\nThe dataset is provided in a CSV format with the following columns:\n\n1. id: Unique identifier for each dialogue.\n2. dialogue: The sequential turns of the dialogue.\n3. relevant_sentences: The dialogue turns that GPT-4 considered relevant for generating the summary.\n4. summary: The reference summary for the dialogue.", "## Usage\n\nResearchers and practitioners interested in dialogue summarization can leverage DialogSum Enhanced for training, validating, and testing their models. The 'Relevant Sentences' column provides additional insights into the model's decision-making process during summarization." ]
[ 14, 9, 75, 83, 68, 79, 65 ]
[ "passage: TAGS\n#license-apache-2.0 #region-us \n# DialogSum Enhanced Dataset## Overview\n\nDialogSum Enhanced is an extension of the original DialogSum dataset, enriched with a new column called 'Relevant Sentences.' This dataset is designed to facilitate research in dialogue summarization by providing additional information about the dialogue turns that GPT-4 considers relevant for generating summaries.### Changes from DialogSum\n\nThe primary enhancement in DialogSum Enhanced is the inclusion of the 'Relevant Sentences' column. This column contains the dialogue turns that GPT-4 identified as crucial for the generation of a summary. This information can be valuable for understanding the model's decision-making process and improving dialogue summarization models.### Split Information\n\n- Train Split: The train split in DialogSum Enhanced consists of half of the original DialogSum train split.\n\n- Test and Validation Sets: The test and validation sets in DialogSum Enhanced retain their full length from the original DialogSum dataset.## Dataset Structure\n\nThe dataset is provided in a CSV format with the following columns:\n\n1. id: Unique identifier for each dialogue.\n2. dialogue: The sequential turns of the dialogue.\n3. relevant_sentences: The dialogue turns that GPT-4 considered relevant for generating the summary.\n4. summary: The reference summary for the dialogue.## Usage\n\nResearchers and practitioners interested in dialogue summarization can leverage DialogSum Enhanced for training, validating, and testing their models. The 'Relevant Sentences' column provides additional insights into the model's decision-making process during summarization." ]
e218bd1714ccb2c4b421ba76e75af22b2e6d7695
# Dataset Card for Evaluation run of zyh3826/20231206094523-pretrain-Llama-2-13b-hf-76000 <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [zyh3826/20231206094523-pretrain-Llama-2-13b-hf-76000](https://huggingface.co/zyh3826/20231206094523-pretrain-Llama-2-13b-hf-76000) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_zyh3826__20231206094523-pretrain-Llama-2-13b-hf-76000", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-12-16T19:10:08.159006](https://huggingface.co/datasets/open-llm-leaderboard/details_zyh3826__20231206094523-pretrain-Llama-2-13b-hf-76000/blob/main/results_2023-12-16T19-10-08.159006.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.24943893194371924, "acc_stderr": 0.030400489062706072, "acc_norm": 0.25014496177092693, "acc_norm_stderr": 0.031209015064341802, "mc1": 0.25458996328029376, "mc1_stderr": 0.015250117079156482, "mc2": 0.4471244819837127, "mc2_stderr": 0.014622242508536614 }, "harness|arc:challenge|25": { "acc": 0.27303754266211605, "acc_stderr": 0.01301933276263575, "acc_norm": 0.310580204778157, "acc_norm_stderr": 0.013522292098053055 }, "harness|hellaswag|10": { "acc": 0.4026090420235013, "acc_stderr": 0.0048942100113032235, "acc_norm": 0.5203146783509262, "acc_norm_stderr": 0.0049856612829985835 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.22, "acc_stderr": 0.04163331998932269, "acc_norm": 0.22, "acc_norm_stderr": 0.04163331998932269 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.25925925925925924, "acc_stderr": 0.03785714465066653, "acc_norm": 0.25925925925925924, "acc_norm_stderr": 0.03785714465066653 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.18421052631578946, "acc_stderr": 0.0315469804508223, "acc_norm": 0.18421052631578946, "acc_norm_stderr": 0.0315469804508223 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.25, "acc_stderr": 0.04351941398892446, "acc_norm": 0.25, "acc_norm_stderr": 0.04351941398892446 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.26037735849056604, "acc_stderr": 0.02700876609070809, "acc_norm": 0.26037735849056604, "acc_norm_stderr": 0.02700876609070809 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.2222222222222222, "acc_stderr": 0.03476590104304134, "acc_norm": 0.2222222222222222, "acc_norm_stderr": 0.03476590104304134 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.2, "acc_stderr": 0.040201512610368445, "acc_norm": 0.2, "acc_norm_stderr": 0.040201512610368445 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.15,
"acc_stderr": 0.03588702812826372, "acc_norm": 0.15, "acc_norm_stderr": 0.03588702812826372 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.24, "acc_stderr": 0.042923469599092816, "acc_norm": 0.24, "acc_norm_stderr": 0.042923469599092816 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.2254335260115607, "acc_stderr": 0.03186209851641143, "acc_norm": 0.2254335260115607, "acc_norm_stderr": 0.03186209851641143 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.19607843137254902, "acc_stderr": 0.03950581861179961, "acc_norm": 0.19607843137254902, "acc_norm_stderr": 0.03950581861179961 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.24, "acc_stderr": 0.04292346959909282, "acc_norm": 0.24, "acc_norm_stderr": 0.04292346959909282 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.32340425531914896, "acc_stderr": 0.030579442773610334, "acc_norm": 0.32340425531914896, "acc_norm_stderr": 0.030579442773610334 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.21929824561403508, "acc_stderr": 0.03892431106518754, "acc_norm": 0.21929824561403508, "acc_norm_stderr": 0.03892431106518754 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.20689655172413793, "acc_stderr": 0.03375672449560554, "acc_norm": 0.20689655172413793, "acc_norm_stderr": 0.03375672449560554 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.25132275132275134, "acc_stderr": 0.022340482339643898, "acc_norm": 0.25132275132275134, "acc_norm_stderr": 0.022340482339643898 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.21428571428571427, "acc_stderr": 0.03670066451047182, "acc_norm": 0.21428571428571427, "acc_norm_stderr": 0.03670066451047182 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.34, "acc_stderr": 0.04760952285695236, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695236 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.25806451612903225, "acc_stderr": 0.024892469172462833, "acc_norm": 0.25806451612903225, "acc_norm_stderr": 0.024892469172462833 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.28078817733990147, "acc_stderr": 0.0316185633535861, "acc_norm": 0.28078817733990147, "acc_norm_stderr": 0.0316185633535861 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.29, "acc_stderr": 0.045604802157206845, "acc_norm": 0.29, "acc_norm_stderr": 0.045604802157206845 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.21818181818181817, "acc_stderr": 0.03225078108306289, "acc_norm": 0.21818181818181817, "acc_norm_stderr": 0.03225078108306289 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.21212121212121213, "acc_stderr": 0.029126522834586818, "acc_norm": 0.21212121212121213, "acc_norm_stderr": 0.029126522834586818 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.21243523316062177, "acc_stderr": 0.029519282616817244, "acc_norm": 0.21243523316062177, "acc_norm_stderr": 0.029519282616817244 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.20512820512820512, "acc_stderr": 0.02047323317355198, "acc_norm": 0.20512820512820512, "acc_norm_stderr": 0.02047323317355198 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.26296296296296295, "acc_stderr": 0.026842057873833706, "acc_norm": 0.26296296296296295, "acc_norm_stderr": 0.026842057873833706 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.23109243697478993, "acc_stderr": 0.027381406927868966, "acc_norm": 0.23109243697478993, "acc_norm_stderr": 
0.027381406927868966 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.2052980132450331, "acc_stderr": 0.03297986648473835, "acc_norm": 0.2052980132450331, "acc_norm_stderr": 0.03297986648473835 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.23119266055045873, "acc_stderr": 0.01807575024163315, "acc_norm": 0.23119266055045873, "acc_norm_stderr": 0.01807575024163315 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.16666666666666666, "acc_stderr": 0.025416428388767478, "acc_norm": 0.16666666666666666, "acc_norm_stderr": 0.025416428388767478 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.25, "acc_stderr": 0.03039153369274154, "acc_norm": 0.25, "acc_norm_stderr": 0.03039153369274154 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.24472573839662448, "acc_stderr": 0.027985699387036416, "acc_norm": 0.24472573839662448, "acc_norm_stderr": 0.027985699387036416 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.38565022421524664, "acc_stderr": 0.03266842214289201, "acc_norm": 0.38565022421524664, "acc_norm_stderr": 0.03266842214289201 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.22137404580152673, "acc_stderr": 0.036412970813137276, "acc_norm": 0.22137404580152673, "acc_norm_stderr": 0.036412970813137276 }, "harness|hendrycksTest-international_law|5": { "acc": 0.2396694214876033, "acc_stderr": 0.038968789850704164, "acc_norm": 0.2396694214876033, "acc_norm_stderr": 0.038968789850704164 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.2777777777777778, "acc_stderr": 0.04330043749650743, "acc_norm": 0.2777777777777778, "acc_norm_stderr": 0.04330043749650743 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.27607361963190186, "acc_stderr": 0.03512385283705051, "acc_norm": 0.27607361963190186, "acc_norm_stderr": 0.03512385283705051 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.2857142857142857, "acc_stderr": 0.04287858751340456, "acc_norm": 0.2857142857142857, "acc_norm_stderr": 0.04287858751340456 }, "harness|hendrycksTest-management|5": { "acc": 0.2524271844660194, "acc_stderr": 0.04301250399690877, "acc_norm": 0.2524271844660194, "acc_norm_stderr": 0.04301250399690877 }, "harness|hendrycksTest-marketing|5": { "acc": 0.2606837606837607, "acc_stderr": 0.028760348956523414, "acc_norm": 0.2606837606837607, "acc_norm_stderr": 0.028760348956523414 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.27, "acc_stderr": 0.0446196043338474, "acc_norm": 0.27, "acc_norm_stderr": 0.0446196043338474 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.2771392081736909, "acc_stderr": 0.01600563629412242, "acc_norm": 0.2771392081736909, "acc_norm_stderr": 0.01600563629412242 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.2514450867052023, "acc_stderr": 0.02335736578587404, "acc_norm": 0.2514450867052023, "acc_norm_stderr": 0.02335736578587404 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.2424581005586592, "acc_stderr": 0.014333522059217889, "acc_norm": 0.2424581005586592, "acc_norm_stderr": 0.014333522059217889 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.21895424836601307, "acc_stderr": 0.02367908986180772, "acc_norm": 0.21895424836601307, "acc_norm_stderr": 0.02367908986180772 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.26688102893890675, "acc_stderr": 0.025122637608816643, "acc_norm": 0.26688102893890675, "acc_norm_stderr": 0.025122637608816643 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.2777777777777778, "acc_stderr": 0.024922001168886338, 
"acc_norm": 0.2777777777777778, "acc_norm_stderr": 0.024922001168886338 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.24822695035460993, "acc_stderr": 0.025770015644290403, "acc_norm": 0.24822695035460993, "acc_norm_stderr": 0.025770015644290403 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.2457627118644068, "acc_stderr": 0.010996156635142692, "acc_norm": 0.2457627118644068, "acc_norm_stderr": 0.010996156635142692 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.18382352941176472, "acc_stderr": 0.023529242185193113, "acc_norm": 0.18382352941176472, "acc_norm_stderr": 0.023529242185193113 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.24836601307189543, "acc_stderr": 0.017479487001364764, "acc_norm": 0.24836601307189543, "acc_norm_stderr": 0.017479487001364764 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.34545454545454546, "acc_stderr": 0.04554619617541054, "acc_norm": 0.34545454545454546, "acc_norm_stderr": 0.04554619617541054 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.19591836734693877, "acc_stderr": 0.02540930195322568, "acc_norm": 0.19591836734693877, "acc_norm_stderr": 0.02540930195322568 }, "harness|hendrycksTest-sociology|5": { "acc": 0.23880597014925373, "acc_stderr": 0.030147775935409224, "acc_norm": 0.23880597014925373, "acc_norm_stderr": 0.030147775935409224 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.21, "acc_stderr": 0.040936018074033256, "acc_norm": 0.21, "acc_norm_stderr": 0.040936018074033256 }, "harness|hendrycksTest-virology|5": { "acc": 0.3253012048192771, "acc_stderr": 0.03647168523683227, "acc_norm": 0.3253012048192771, "acc_norm_stderr": 0.03647168523683227 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.21052631578947367, "acc_stderr": 0.0312678171466318, "acc_norm": 0.21052631578947367, "acc_norm_stderr": 0.0312678171466318 }, "harness|truthfulqa:mc|0": { "mc1": 0.25458996328029376, "mc1_stderr": 0.015250117079156482, "mc2": 0.4471244819837127, "mc2_stderr": 0.014622242508536614 }, "harness|winogrande|5": { "acc": 0.6124704025256511, "acc_stderr": 0.01369235463601677 }, "harness|gsm8k|5": { "acc": 0.0, "acc_stderr": 0.0 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. 
--> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
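The loading snippet earlier in this card targets a single task configuration (`harness_winogrande_5`). As an illustrative sketch that is not part of the auto-generated card, the same `datasets` API can also enumerate the available configurations and pull the aggregated "results" configuration through the "latest" split alias described above. The configuration and split names below are taken from this card's own metadata; `get_dataset_config_names` is assumed to be available in a recent release of the `datasets` library.

```python
from datasets import get_dataset_config_names, load_dataset

REPO = "open-llm-leaderboard/details_zyh3826__20231206094523-pretrain-Llama-2-13b-hf-76000"

# One configuration per evaluated task, plus the aggregated "results" config.
configs = get_dataset_config_names(REPO)
print(f"{len(configs)} configurations, e.g. {configs[:3]}")

# "latest" is an alias for the most recent run's split
# (here 2023-12-16T19:10:08.159006); "results" holds the aggregated metrics.
aggregated = load_dataset(REPO, "results", split="latest")
print(aggregated[0])
```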
open-llm-leaderboard/details_zyh3826__20231206094523-pretrain-Llama-2-13b-hf-76000
[ "region:us" ]
2023-12-16T19:13:02+00:00
{"pretty_name": "Evaluation run of zyh3826/20231206094523-pretrain-Llama-2-13b-hf-76000", "dataset_summary": "Dataset automatically created during the evaluation run of model [zyh3826/20231206094523-pretrain-Llama-2-13b-hf-76000](https://huggingface.co/zyh3826/20231206094523-pretrain-Llama-2-13b-hf-76000) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_zyh3826__20231206094523-pretrain-Llama-2-13b-hf-76000\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-12-16T19:10:08.159006](https://huggingface.co/datasets/open-llm-leaderboard/details_zyh3826__20231206094523-pretrain-Llama-2-13b-hf-76000/blob/main/results_2023-12-16T19-10-08.159006.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.24943893194371924,\n \"acc_stderr\": 0.030400489062706072,\n \"acc_norm\": 0.25014496177092693,\n \"acc_norm_stderr\": 0.031209015064341802,\n \"mc1\": 0.25458996328029376,\n \"mc1_stderr\": 0.015250117079156482,\n \"mc2\": 0.4471244819837127,\n \"mc2_stderr\": 0.014622242508536614\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.27303754266211605,\n \"acc_stderr\": 0.01301933276263575,\n \"acc_norm\": 0.310580204778157,\n \"acc_norm_stderr\": 0.013522292098053055\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.4026090420235013,\n \"acc_stderr\": 0.0048942100113032235,\n \"acc_norm\": 0.5203146783509262,\n \"acc_norm_stderr\": 0.0049856612829985835\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.22,\n \"acc_stderr\": 0.04163331998932269,\n \"acc_norm\": 0.22,\n \"acc_norm_stderr\": 0.04163331998932269\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.25925925925925924,\n \"acc_stderr\": 0.03785714465066653,\n \"acc_norm\": 0.25925925925925924,\n \"acc_norm_stderr\": 0.03785714465066653\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.18421052631578946,\n \"acc_stderr\": 0.0315469804508223,\n \"acc_norm\": 0.18421052631578946,\n \"acc_norm_stderr\": 0.0315469804508223\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.25,\n \"acc_stderr\": 0.04351941398892446,\n \"acc_norm\": 0.25,\n \"acc_norm_stderr\": 0.04351941398892446\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.26037735849056604,\n \"acc_stderr\": 0.02700876609070809,\n \"acc_norm\": 0.26037735849056604,\n \"acc_norm_stderr\": 0.02700876609070809\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.2222222222222222,\n \"acc_stderr\": 0.03476590104304134,\n \"acc_norm\": 0.2222222222222222,\n \"acc_norm_stderr\": 
0.03476590104304134\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.2,\n \"acc_stderr\": 0.040201512610368445,\n \"acc_norm\": 0.2,\n \"acc_norm_stderr\": 0.040201512610368445\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.15,\n \"acc_stderr\": 0.03588702812826372,\n \"acc_norm\": 0.15,\n \"acc_norm_stderr\": 0.03588702812826372\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.24,\n \"acc_stderr\": 0.042923469599092816,\n \"acc_norm\": 0.24,\n \"acc_norm_stderr\": 0.042923469599092816\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.2254335260115607,\n \"acc_stderr\": 0.03186209851641143,\n \"acc_norm\": 0.2254335260115607,\n \"acc_norm_stderr\": 0.03186209851641143\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.19607843137254902,\n \"acc_stderr\": 0.03950581861179961,\n \"acc_norm\": 0.19607843137254902,\n \"acc_norm_stderr\": 0.03950581861179961\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.24,\n \"acc_stderr\": 0.04292346959909282,\n \"acc_norm\": 0.24,\n \"acc_norm_stderr\": 0.04292346959909282\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.32340425531914896,\n \"acc_stderr\": 0.030579442773610334,\n \"acc_norm\": 0.32340425531914896,\n \"acc_norm_stderr\": 0.030579442773610334\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.21929824561403508,\n \"acc_stderr\": 0.03892431106518754,\n \"acc_norm\": 0.21929824561403508,\n \"acc_norm_stderr\": 0.03892431106518754\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.20689655172413793,\n \"acc_stderr\": 0.03375672449560554,\n \"acc_norm\": 0.20689655172413793,\n \"acc_norm_stderr\": 0.03375672449560554\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.25132275132275134,\n \"acc_stderr\": 0.022340482339643898,\n \"acc_norm\": 0.25132275132275134,\n \"acc_norm_stderr\": 0.022340482339643898\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.21428571428571427,\n \"acc_stderr\": 0.03670066451047182,\n \"acc_norm\": 0.21428571428571427,\n \"acc_norm_stderr\": 0.03670066451047182\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.34,\n \"acc_stderr\": 0.04760952285695236,\n \"acc_norm\": 0.34,\n \"acc_norm_stderr\": 0.04760952285695236\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.25806451612903225,\n \"acc_stderr\": 0.024892469172462833,\n \"acc_norm\": 0.25806451612903225,\n \"acc_norm_stderr\": 0.024892469172462833\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.28078817733990147,\n \"acc_stderr\": 0.0316185633535861,\n \"acc_norm\": 0.28078817733990147,\n \"acc_norm_stderr\": 0.0316185633535861\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.29,\n \"acc_stderr\": 0.045604802157206845,\n \"acc_norm\": 0.29,\n \"acc_norm_stderr\": 0.045604802157206845\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.21818181818181817,\n \"acc_stderr\": 0.03225078108306289,\n \"acc_norm\": 0.21818181818181817,\n \"acc_norm_stderr\": 0.03225078108306289\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.21212121212121213,\n \"acc_stderr\": 0.029126522834586818,\n \"acc_norm\": 0.21212121212121213,\n \"acc_norm_stderr\": 0.029126522834586818\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.21243523316062177,\n \"acc_stderr\": 0.029519282616817244,\n 
\"acc_norm\": 0.21243523316062177,\n \"acc_norm_stderr\": 0.029519282616817244\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.20512820512820512,\n \"acc_stderr\": 0.02047323317355198,\n \"acc_norm\": 0.20512820512820512,\n \"acc_norm_stderr\": 0.02047323317355198\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.26296296296296295,\n \"acc_stderr\": 0.026842057873833706,\n \"acc_norm\": 0.26296296296296295,\n \"acc_norm_stderr\": 0.026842057873833706\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.23109243697478993,\n \"acc_stderr\": 0.027381406927868966,\n \"acc_norm\": 0.23109243697478993,\n \"acc_norm_stderr\": 0.027381406927868966\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.2052980132450331,\n \"acc_stderr\": 0.03297986648473835,\n \"acc_norm\": 0.2052980132450331,\n \"acc_norm_stderr\": 0.03297986648473835\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.23119266055045873,\n \"acc_stderr\": 0.01807575024163315,\n \"acc_norm\": 0.23119266055045873,\n \"acc_norm_stderr\": 0.01807575024163315\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.16666666666666666,\n \"acc_stderr\": 0.025416428388767478,\n \"acc_norm\": 0.16666666666666666,\n \"acc_norm_stderr\": 0.025416428388767478\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.25,\n \"acc_stderr\": 0.03039153369274154,\n \"acc_norm\": 0.25,\n \"acc_norm_stderr\": 0.03039153369274154\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.24472573839662448,\n \"acc_stderr\": 0.027985699387036416,\n \"acc_norm\": 0.24472573839662448,\n \"acc_norm_stderr\": 0.027985699387036416\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.38565022421524664,\n \"acc_stderr\": 0.03266842214289201,\n \"acc_norm\": 0.38565022421524664,\n \"acc_norm_stderr\": 0.03266842214289201\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.22137404580152673,\n \"acc_stderr\": 0.036412970813137276,\n \"acc_norm\": 0.22137404580152673,\n \"acc_norm_stderr\": 0.036412970813137276\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.2396694214876033,\n \"acc_stderr\": 0.038968789850704164,\n \"acc_norm\": 0.2396694214876033,\n \"acc_norm_stderr\": 0.038968789850704164\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.2777777777777778,\n \"acc_stderr\": 0.04330043749650743,\n \"acc_norm\": 0.2777777777777778,\n \"acc_norm_stderr\": 0.04330043749650743\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.27607361963190186,\n \"acc_stderr\": 0.03512385283705051,\n \"acc_norm\": 0.27607361963190186,\n \"acc_norm_stderr\": 0.03512385283705051\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.2857142857142857,\n \"acc_stderr\": 0.04287858751340456,\n \"acc_norm\": 0.2857142857142857,\n \"acc_norm_stderr\": 0.04287858751340456\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.2524271844660194,\n \"acc_stderr\": 0.04301250399690877,\n \"acc_norm\": 0.2524271844660194,\n \"acc_norm_stderr\": 0.04301250399690877\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.2606837606837607,\n \"acc_stderr\": 0.028760348956523414,\n \"acc_norm\": 0.2606837606837607,\n \"acc_norm_stderr\": 0.028760348956523414\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.27,\n \"acc_stderr\": 0.0446196043338474,\n \"acc_norm\": 0.27,\n \"acc_norm_stderr\": 
0.0446196043338474\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.2771392081736909,\n \"acc_stderr\": 0.01600563629412242,\n \"acc_norm\": 0.2771392081736909,\n \"acc_norm_stderr\": 0.01600563629412242\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.2514450867052023,\n \"acc_stderr\": 0.02335736578587404,\n \"acc_norm\": 0.2514450867052023,\n \"acc_norm_stderr\": 0.02335736578587404\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.2424581005586592,\n \"acc_stderr\": 0.014333522059217889,\n \"acc_norm\": 0.2424581005586592,\n \"acc_norm_stderr\": 0.014333522059217889\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.21895424836601307,\n \"acc_stderr\": 0.02367908986180772,\n \"acc_norm\": 0.21895424836601307,\n \"acc_norm_stderr\": 0.02367908986180772\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.26688102893890675,\n \"acc_stderr\": 0.025122637608816643,\n \"acc_norm\": 0.26688102893890675,\n \"acc_norm_stderr\": 0.025122637608816643\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.2777777777777778,\n \"acc_stderr\": 0.024922001168886338,\n \"acc_norm\": 0.2777777777777778,\n \"acc_norm_stderr\": 0.024922001168886338\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.24822695035460993,\n \"acc_stderr\": 0.025770015644290403,\n \"acc_norm\": 0.24822695035460993,\n \"acc_norm_stderr\": 0.025770015644290403\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.2457627118644068,\n \"acc_stderr\": 0.010996156635142692,\n \"acc_norm\": 0.2457627118644068,\n \"acc_norm_stderr\": 0.010996156635142692\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.18382352941176472,\n \"acc_stderr\": 0.023529242185193113,\n \"acc_norm\": 0.18382352941176472,\n \"acc_norm_stderr\": 0.023529242185193113\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.24836601307189543,\n \"acc_stderr\": 0.017479487001364764,\n \"acc_norm\": 0.24836601307189543,\n \"acc_norm_stderr\": 0.017479487001364764\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.34545454545454546,\n \"acc_stderr\": 0.04554619617541054,\n \"acc_norm\": 0.34545454545454546,\n \"acc_norm_stderr\": 0.04554619617541054\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.19591836734693877,\n \"acc_stderr\": 0.02540930195322568,\n \"acc_norm\": 0.19591836734693877,\n \"acc_norm_stderr\": 0.02540930195322568\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.23880597014925373,\n \"acc_stderr\": 0.030147775935409224,\n \"acc_norm\": 0.23880597014925373,\n \"acc_norm_stderr\": 0.030147775935409224\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.21,\n \"acc_stderr\": 0.040936018074033256,\n \"acc_norm\": 0.21,\n \"acc_norm_stderr\": 0.040936018074033256\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.3253012048192771,\n \"acc_stderr\": 0.03647168523683227,\n \"acc_norm\": 0.3253012048192771,\n \"acc_norm_stderr\": 0.03647168523683227\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.21052631578947367,\n \"acc_stderr\": 0.0312678171466318,\n \"acc_norm\": 0.21052631578947367,\n \"acc_norm_stderr\": 0.0312678171466318\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.25458996328029376,\n \"mc1_stderr\": 0.015250117079156482,\n \"mc2\": 0.4471244819837127,\n \"mc2_stderr\": 0.014622242508536614\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.6124704025256511,\n \"acc_stderr\": 0.01369235463601677\n 
},\n \"harness|gsm8k|5\": {\n \"acc\": 0.0,\n \"acc_stderr\": 0.0\n }\n}\n```", "repo_url": "https://huggingface.co/zyh3826/20231206094523-pretrain-Llama-2-13b-hf-76000", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_12_16T19_10_08.159006", "path": ["**/details_harness|arc:challenge|25_2023-12-16T19-10-08.159006.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-12-16T19-10-08.159006.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_12_16T19_10_08.159006", "path": ["**/details_harness|gsm8k|5_2023-12-16T19-10-08.159006.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-12-16T19-10-08.159006.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_12_16T19_10_08.159006", "path": ["**/details_harness|hellaswag|10_2023-12-16T19-10-08.159006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-12-16T19-10-08.159006.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_12_16T19_10_08.159006", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T19-10-08.159006.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-12-16T19-10-08.159006.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-12-16T19-10-08.159006.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T19-10-08.159006.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T19-10-08.159006.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-12-16T19-10-08.159006.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T19-10-08.159006.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T19-10-08.159006.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T19-10-08.159006.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T19-10-08.159006.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-12-16T19-10-08.159006.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-12-16T19-10-08.159006.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T19-10-08.159006.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-12-16T19-10-08.159006.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T19-10-08.159006.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T19-10-08.159006.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T19-10-08.159006.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-12-16T19-10-08.159006.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T19-10-08.159006.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T19-10-08.159006.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T19-10-08.159006.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T19-10-08.159006.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T19-10-08.159006.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T19-10-08.159006.parquet", 
"**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T19-10-08.159006.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T19-10-08.159006.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T19-10-08.159006.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T19-10-08.159006.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T19-10-08.159006.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T19-10-08.159006.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T19-10-08.159006.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T19-10-08.159006.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-12-16T19-10-08.159006.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T19-10-08.159006.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-12-16T19-10-08.159006.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T19-10-08.159006.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T19-10-08.159006.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T19-10-08.159006.parquet", "**/details_harness|hendrycksTest-management|5_2023-12-16T19-10-08.159006.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-12-16T19-10-08.159006.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T19-10-08.159006.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T19-10-08.159006.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T19-10-08.159006.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T19-10-08.159006.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-12-16T19-10-08.159006.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-12-16T19-10-08.159006.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-12-16T19-10-08.159006.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T19-10-08.159006.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-12-16T19-10-08.159006.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T19-10-08.159006.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T19-10-08.159006.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-12-16T19-10-08.159006.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-12-16T19-10-08.159006.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-12-16T19-10-08.159006.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T19-10-08.159006.parquet", "**/details_harness|hendrycksTest-virology|5_2023-12-16T19-10-08.159006.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-12-16T19-10-08.159006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T19-10-08.159006.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-12-16T19-10-08.159006.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-12-16T19-10-08.159006.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T19-10-08.159006.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T19-10-08.159006.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-12-16T19-10-08.159006.parquet", 
"**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T19-10-08.159006.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T19-10-08.159006.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T19-10-08.159006.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T19-10-08.159006.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-12-16T19-10-08.159006.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-12-16T19-10-08.159006.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T19-10-08.159006.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-12-16T19-10-08.159006.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T19-10-08.159006.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T19-10-08.159006.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T19-10-08.159006.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-12-16T19-10-08.159006.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T19-10-08.159006.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T19-10-08.159006.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T19-10-08.159006.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T19-10-08.159006.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T19-10-08.159006.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T19-10-08.159006.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T19-10-08.159006.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T19-10-08.159006.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T19-10-08.159006.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T19-10-08.159006.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T19-10-08.159006.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T19-10-08.159006.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T19-10-08.159006.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T19-10-08.159006.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-12-16T19-10-08.159006.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T19-10-08.159006.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-12-16T19-10-08.159006.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T19-10-08.159006.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T19-10-08.159006.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T19-10-08.159006.parquet", "**/details_harness|hendrycksTest-management|5_2023-12-16T19-10-08.159006.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-12-16T19-10-08.159006.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T19-10-08.159006.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T19-10-08.159006.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T19-10-08.159006.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T19-10-08.159006.parquet", 
"**/details_harness|hendrycksTest-nutrition|5_2023-12-16T19-10-08.159006.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-12-16T19-10-08.159006.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-12-16T19-10-08.159006.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T19-10-08.159006.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-12-16T19-10-08.159006.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T19-10-08.159006.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T19-10-08.159006.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-12-16T19-10-08.159006.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-12-16T19-10-08.159006.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-12-16T19-10-08.159006.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T19-10-08.159006.parquet", "**/details_harness|hendrycksTest-virology|5_2023-12-16T19-10-08.159006.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-12-16T19-10-08.159006.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_12_16T19_10_08.159006", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T19-10-08.159006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T19-10-08.159006.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_12_16T19_10_08.159006", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-12-16T19-10-08.159006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-12-16T19-10-08.159006.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_12_16T19_10_08.159006", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-12-16T19-10-08.159006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-12-16T19-10-08.159006.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_12_16T19_10_08.159006", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T19-10-08.159006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T19-10-08.159006.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_12_16T19_10_08.159006", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T19-10-08.159006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T19-10-08.159006.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_12_16T19_10_08.159006", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-12-16T19-10-08.159006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-12-16T19-10-08.159006.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_12_16T19_10_08.159006", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T19-10-08.159006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T19-10-08.159006.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", 
"data_files": [{"split": "2023_12_16T19_10_08.159006", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T19-10-08.159006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T19-10-08.159006.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_12_16T19_10_08.159006", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T19-10-08.159006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T19-10-08.159006.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_12_16T19_10_08.159006", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T19-10-08.159006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T19-10-08.159006.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_12_16T19_10_08.159006", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-12-16T19-10-08.159006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-12-16T19-10-08.159006.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_12_16T19_10_08.159006", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-12-16T19-10-08.159006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-12-16T19-10-08.159006.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_12_16T19_10_08.159006", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T19-10-08.159006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T19-10-08.159006.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_12_16T19_10_08.159006", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-12-16T19-10-08.159006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-12-16T19-10-08.159006.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_12_16T19_10_08.159006", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T19-10-08.159006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T19-10-08.159006.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_12_16T19_10_08.159006", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T19-10-08.159006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T19-10-08.159006.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_12_16T19_10_08.159006", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T19-10-08.159006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T19-10-08.159006.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_12_16T19_10_08.159006", "path": 
["**/details_harness|hendrycksTest-global_facts|5_2023-12-16T19-10-08.159006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-12-16T19-10-08.159006.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_12_16T19_10_08.159006", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T19-10-08.159006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T19-10-08.159006.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_12_16T19_10_08.159006", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T19-10-08.159006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T19-10-08.159006.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_12_16T19_10_08.159006", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T19-10-08.159006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T19-10-08.159006.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_12_16T19_10_08.159006", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T19-10-08.159006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T19-10-08.159006.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_12_16T19_10_08.159006", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T19-10-08.159006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T19-10-08.159006.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_12_16T19_10_08.159006", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T19-10-08.159006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T19-10-08.159006.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_12_16T19_10_08.159006", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T19-10-08.159006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T19-10-08.159006.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_12_16T19_10_08.159006", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T19-10-08.159006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T19-10-08.159006.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_12_16T19_10_08.159006", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T19-10-08.159006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T19-10-08.159006.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_12_16T19_10_08.159006", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T19-10-08.159006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T19-10-08.159006.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_12_16T19_10_08.159006", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T19-10-08.159006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T19-10-08.159006.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_12_16T19_10_08.159006", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T19-10-08.159006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T19-10-08.159006.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_12_16T19_10_08.159006", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T19-10-08.159006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T19-10-08.159006.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_12_16T19_10_08.159006", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T19-10-08.159006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T19-10-08.159006.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_12_16T19_10_08.159006", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-12-16T19-10-08.159006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-12-16T19-10-08.159006.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_12_16T19_10_08.159006", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T19-10-08.159006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T19-10-08.159006.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_12_16T19_10_08.159006", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-12-16T19-10-08.159006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-12-16T19-10-08.159006.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_12_16T19_10_08.159006", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T19-10-08.159006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T19-10-08.159006.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_12_16T19_10_08.159006", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T19-10-08.159006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T19-10-08.159006.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": 
"2023_12_16T19_10_08.159006", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T19-10-08.159006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T19-10-08.159006.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_12_16T19_10_08.159006", "path": ["**/details_harness|hendrycksTest-management|5_2023-12-16T19-10-08.159006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-12-16T19-10-08.159006.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_12_16T19_10_08.159006", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-12-16T19-10-08.159006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-12-16T19-10-08.159006.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_12_16T19_10_08.159006", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T19-10-08.159006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T19-10-08.159006.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_12_16T19_10_08.159006", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T19-10-08.159006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T19-10-08.159006.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_12_16T19_10_08.159006", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T19-10-08.159006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T19-10-08.159006.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_12_16T19_10_08.159006", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T19-10-08.159006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T19-10-08.159006.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_12_16T19_10_08.159006", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-12-16T19-10-08.159006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-12-16T19-10-08.159006.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_12_16T19_10_08.159006", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-12-16T19-10-08.159006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-12-16T19-10-08.159006.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_12_16T19_10_08.159006", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-12-16T19-10-08.159006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-12-16T19-10-08.159006.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_12_16T19_10_08.159006", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T19-10-08.159006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T19-10-08.159006.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_12_16T19_10_08.159006", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-12-16T19-10-08.159006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-12-16T19-10-08.159006.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_12_16T19_10_08.159006", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T19-10-08.159006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T19-10-08.159006.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_12_16T19_10_08.159006", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T19-10-08.159006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T19-10-08.159006.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_12_16T19_10_08.159006", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-12-16T19-10-08.159006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-12-16T19-10-08.159006.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_12_16T19_10_08.159006", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-12-16T19-10-08.159006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-12-16T19-10-08.159006.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_12_16T19_10_08.159006", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-12-16T19-10-08.159006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-12-16T19-10-08.159006.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_12_16T19_10_08.159006", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T19-10-08.159006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T19-10-08.159006.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_12_16T19_10_08.159006", "path": ["**/details_harness|hendrycksTest-virology|5_2023-12-16T19-10-08.159006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-12-16T19-10-08.159006.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_12_16T19_10_08.159006", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-12-16T19-10-08.159006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-12-16T19-10-08.159006.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_12_16T19_10_08.159006", "path": ["**/details_harness|truthfulqa:mc|0_2023-12-16T19-10-08.159006.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-12-16T19-10-08.159006.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_12_16T19_10_08.159006", "path": ["**/details_harness|winogrande|5_2023-12-16T19-10-08.159006.parquet"]}, {"split": "latest", "path": 
["**/details_harness|winogrande|5_2023-12-16T19-10-08.159006.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_12_16T19_10_08.159006", "path": ["results_2023-12-16T19-10-08.159006.parquet"]}, {"split": "latest", "path": ["results_2023-12-16T19-10-08.159006.parquet"]}]}]}
2023-12-16T19:13:44+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of zyh3826/20231206094523-pretrain-Llama-2-13b-hf-76000 Dataset automatically created during the evaluation run of model zyh3826/20231206094523-pretrain-Llama-2-13b-hf-76000 on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-12-16T19:10:08.159006 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each one in the results and in the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
[ "# Dataset Card for Evaluation run of zyh3826/20231206094523-pretrain-Llama-2-13b-hf-76000\n\n\n\nDataset automatically created during the evaluation run of model zyh3826/20231206094523-pretrain-Llama-2-13b-hf-76000 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-12-16T19:10:08.159006(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of zyh3826/20231206094523-pretrain-Llama-2-13b-hf-76000\n\n\n\nDataset automatically created during the evaluation run of model zyh3826/20231206094523-pretrain-Llama-2-13b-hf-76000 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-12-16T19:10:08.159006(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 207, 66, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of zyh3826/20231206094523-pretrain-Llama-2-13b-hf-76000\n\n\n\nDataset automatically created during the evaluation run of model zyh3826/20231206094523-pretrain-Llama-2-13b-hf-76000 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-12-16T19:10:08.159006(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]" ]
65d52be7d28249eb5cdf90a1ac18d0ced23ee952
# ObjectNet

A WebP (lossless) encoded version of [ObjectNet-1.0](https://objectnet.dev/index.html) at original resolution.

## License / Usage Terms

ObjectNet is free to use for both research and commercial applications. The authors own the source images and allow their use under a license derived from Creative Commons Attribution 4.0 with only two additional clauses.

1. **ObjectNet may never be used to tune the parameters of any model.**
2. **Any individual images from ObjectNet may only be posted to the web including their 1 pixel red border**.

If you are using ObjectNet, please cite our work; the citation appears at the bottom of this page. Any derivative of ObjectNet must contain attribution as well.

## About

What is ObjectNet?

* A new kind of vision dataset borrowing the idea of controls from other areas of science.
* No training set, only a test set! Put your vision system through its paces.
* Collected to intentionally show objects from new viewpoints on new backgrounds.
* 50,000 image test set, same as ImageNet, with controls for rotation, background, and viewpoint.
* 313 object classes, 113 of which overlap with ImageNet.
* Large performance drop, what you can expect from vision systems in the real world!
* Robust to fine-tuning and a very difficult transfer learning problem

## Why the Red Borders / How do I recognize if an image is in ObjectNet?

As training sets become huge, the risk that test and training sets overlap is serious. We provide ObjectNet with a 2 pixel red border around each image which must be removed before performing inference. The ObjectNet license requires that if you post images from ObjectNet to the web, you include this border. Any time you see an image with a solid 2 pixel red border, that's an indication it's in someone's test set and you should be careful about training on it. Reverse image search will allow you to figure out which test set it is from.

NOTE: original ObjectNet PNG files actually have a 2 pixel red border while their descriptions say 1.

## Preprocessing Steps for This timm Version

1. Re-encode PNG images with lossless WebP (~32% reduction in size), keeping the red border.
2. Add `imagenet_labels` and `imagenet_synsets`, consisting of lists of the ImageNet-1k classes that overlap with each ObjectNet class.

## Citation

```bibtex
@incollection{NIPS2019_9142,
title = {ObjectNet: A large-scale bias-controlled dataset for pushing the limits of object recognition models},
author = {Barbu, Andrei and Mayo, David and Alverio, Julian and Luo, William and Wang, Christopher and Gutfreund, Dan and Tenenbaum, Josh and Katz, Boris},
booktitle = {Advances in Neural Information Processing Systems 32},
editor = {H. Wallach and H. Larochelle and A. Beygelzimer and F. d\textquotesingle Alch\'{e}-Buc and E. Fox and R. Garnett},
pages = {9448--9458},
year = {2019},
publisher = {Curran Associates, Inc.},
url = {http://papers.nips.cc/paper/9142-objectnet-a-large-scale-bias-controlled-dataset-for-pushing-the-limits-of-object-recognition-models.pdf}
}
```
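The border-removal requirement above translates into a small preprocessing step. Below is a minimal sketch, assuming the `datasets` library with Pillow installed and that access to the gated repository has already been granted; streaming is used because the full download is roughly 125 GB.

```python
from datasets import load_dataset

# Stream the test split (the full download is ~125 GB); drop streaming=True
# to materialize the dataset locally instead.
ds = load_dataset("timm/objectnet", split="test", streaming=True)

example = next(iter(ds))
img = example["image"]                           # PIL image, red border included
w, h = img.size
img_no_border = img.crop((2, 2, w - 2, h - 2))   # strip the 2 pixel border before inference

# imagenet_labels / imagenet_synsets list the overlapping ImageNet-1k classes for
# this ObjectNet class (presumably empty when there is no ImageNet counterpart).
print(example["label"], example["imagenet_labels"], example["imagenet_synsets"])
```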
timm/objectnet
[ "task_categories:image-classification", "size_categories:10K<n<100K", "license:other", "region:us" ]
2023-12-16T19:16:24+00:00
{"license": "other", "size_categories": ["10K<n<100K"], "task_categories": ["image-classification"], "pretty_name": "ObjectNet", "dataset_info": {"features": [{"name": "image", "dtype": "image"}, {"name": "label", "dtype": {"class_label": {"names": {"0": "air_freshener", "1": "alarm_clock", "2": "backpack", "3": "baking_sheet", "4": "banana", "5": "band_aid", "6": "baseball_bat", "7": "baseball_glove", "8": "basket", "9": "bathrobe", "10": "battery", "11": "bed_sheet", "12": "beer_bottle", "13": "beer_can", "14": "belt", "15": "bench", "16": "bicycle", "17": "bike_pump", "18": "bills_money", "19": "binder_closed", "20": "biscuits", "21": "blanket", "22": "blender", "23": "blouse", "24": "board_game", "25": "book_closed", "26": "bookend", "27": "boots", "28": "bottle_cap", "29": "bottle_opener", "30": "bottle_stopper", "31": "box", "32": "bracelet", "33": "bread_knife", "34": "bread_loaf", "35": "briefcase", "36": "brooch", "37": "broom", "38": "bucket", "39": "butchers_knife", "40": "butter", "41": "button", "42": "calendar", "43": "can_opener", "44": "candle", "45": "canned_food", "46": "cd_case", "47": "cellphone", "48": "cellphone_case", "49": "cellphone_charger", "50": "cereal", "51": "chair", "52": "cheese", "53": "chess_piece", "54": "chocolate", "55": "chopstick", "56": "clothes_hamper", "57": "clothes_hanger", "58": "coaster", "59": "coffee_beans", "60": "coffee_french_press", "61": "coffee_grinder", "62": "coffee_machine", "63": "coffee_table", "64": "coin_money", "65": "comb", "66": "combination_lock", "67": "computer_mouse", "68": "contact_lens_case", "69": "cooking_oil_bottle", "70": "cork", "71": "cutting_board", "72": "deodorant", "73": "desk_lamp", "74": "detergent", "75": "dish_soap", "76": "document_folder_closed", "77": "dog_bed", "78": "doormat", "79": "drawer_open", "80": "dress", "81": "dress_pants", "82": "dress_shirt", "83": "dress_shoe_men", "84": "dress_shoe_women", "85": "drill", "86": "drinking_cup", "87": "drinking_straw", "88": "drying_rack_for_clothes", "89": "drying_rack_for_dishes", "90": "dust_pan", "91": "dvd_player", "92": "earbuds", "93": "earring", "94": "egg", "95": "egg_carton", "96": "envelope", "97": "eraser_white_board", "98": "extension_cable", "99": "eyeglasses", "100": "fan", "101": "figurine_or_statue", "102": "first_aid_kit", "103": "flashlight", "104": "floss_container", "105": "flour_container", "106": "fork", "107": "frying_pan", "108": "full_sized_towel", "109": "glue_container", "110": "hair_brush", "111": "hair_dryer", "112": "hairclip", "113": "hairtie", "114": "hammer", "115": "hand_mirror", "116": "hand_towel_or_rag", "117": "handbag", "118": "hat", "119": "headphones_over_ear", "120": "helmet", "121": "honey_container", "122": "ice", "123": "ice_cube_tray", "124": "iron_for_clothes", "125": "ironing_board", "126": "jam", "127": "jar", "128": "jeans", "129": "kettle", "130": "key_chain", "131": "keyboard", "132": "ladle", "133": "lampshade", "134": "laptop_charger", "135": "laptop_open", "136": "leaf", "137": "leggings", "138": "lemon", "139": "letter_opener", "140": "lettuce", "141": "light_bulb", "142": "lighter", "143": "lipstick", "144": "loofah", "145": "magazine", "146": "makeup", "147": "makeup_brush", "148": "marker", "149": "match", "150": "measuring_cup", "151": "microwave", "152": "milk", "153": "mixing_salad_bowl", "154": "monitor", "155": "mouse_pad", "156": "mouthwash", "157": "mug", "158": "multitool", "159": "nail_clippers", "160": "nail_fastener", "161": "nail_file", "162": "nail_polish", "163": "napkin", "164": 
"necklace", "165": "newspaper", "166": "night_light", "167": "nightstand", "168": "notebook", "169": "notepad", "170": "nut_for_screw", "171": "orange", "172": "oven_mitts", "173": "padlock", "174": "paint_can", "175": "paintbrush", "176": "paper", "177": "paper_bag", "178": "paper_plates", "179": "paper_towel", "180": "paperclip", "181": "peeler", "182": "pen", "183": "pencil", "184": "pepper_shaker", "185": "pet_food_container", "186": "phone_landline", "187": "photograph_printed", "188": "pill_bottle", "189": "pill_organizer", "190": "pillow", "191": "pitcher", "192": "placemat", "193": "plastic_bag", "194": "plastic_cup", "195": "plastic_wrap", "196": "plate", "197": "playing_cards", "198": "pliers", "199": "plunger", "200": "pop_can", "201": "portable_heater", "202": "poster", "203": "power_bar", "204": "power_cable", "205": "printer", "206": "raincoat", "207": "rake", "208": "razor", "209": "receipt", "210": "remote_control", "211": "removable_blade", "212": "ribbon", "213": "ring", "214": "rock", "215": "rolling_pin", "216": "ruler", "217": "running_shoe", "218": "safety_pin", "219": "salt_shaker", "220": "sandal", "221": "scarf", "222": "scissors", "223": "screw", "224": "scrub_brush", "225": "sewing_kit", "226": "shampoo_bottle", "227": "shoelace", "228": "shorts", "229": "shovel", "230": "skateboard", "231": "skirt", "232": "sleeping_bag", "233": "slipper", "234": "soap_bar", "235": "soap_dispenser", "236": "sock", "237": "soup_bowl", "238": "spatula", "239": "speaker", "240": "sponge", "241": "spoon", "242": "spray_bottle", "243": "squeegee", "244": "squeeze_bottle", "245": "standing_lamp", "246": "stapler", "247": "step_stool", "248": "still_camera", "249": "stopper_sink_tub", "250": "strainer", "251": "stuffed_animal", "252": "sugar_container", "253": "suit_jacket", "254": "suitcase", "255": "sunglasses", "256": "sweater", "257": "swimming_trunks", "258": "t-shirt", "259": "table_knife", "260": "tablecloth", "261": "tablet_ipad", "262": "tanktop", "263": "tape", "264": "tape_measure", "265": "tarp", "266": "teabag", "267": "teapot", "268": "tennis_racket", "269": "thermometer", "270": "thermos", "271": "throw_pillow", "272": "tie", "273": "tissue", "274": "toaster", "275": "toilet_paper_roll", "276": "tomato", "277": "tongs", "278": "toothbrush", "279": "toothpaste", "280": "tote_bag", "281": "toy", "282": "trash_bag", "283": "trash_bin", "284": "travel_case", "285": "tray", "286": "trophy", "287": "tv", "288": "tweezers", "289": "umbrella", "290": "usb_cable", "291": "usb_flash_drive", "292": "vacuum_cleaner", "293": "vase", "294": "video_camera", "295": "walker", "296": "walking_cane", "297": "wallet", "298": "watch", "299": "water_bottle", "300": "water_filter", "301": "webcam", "302": "weight_exercise", "303": "weight_scale", "304": "wheel", "305": "whisk", "306": "whistle", "307": "wine_bottle", "308": "wine_glass", "309": "winter_glove", "310": "wok", "311": "wrench", "312": "ziploc_bag"}}}}, {"name": "imagenet_labels", "sequence": "int64"}, {"name": "imagenet_synsets", "sequence": "string"}], "splits": [{"name": "test", "num_bytes": 127647283245.571, "num_examples": 50273}], "download_size": 125292547404, "dataset_size": 127647283245.571}, "configs": [{"config_name": "default", "data_files": [{"split": "test", "path": "data/test-*"}]}], "extra_gated_prompt": "By clicking on \u201cAccess repository\u201d below, you also agree to ObjectNet Terms: ObjectNet is free to use for both research and commercial applications. 
The authors own the source images and allow their use under a license derived from Creative Commons Attribution 4.0 with only two additional clauses.\n1. ObjectNet may never be used to tune the parameters of any model.\n2. Any individual images from ObjectNet may only be posted to the web including their 1 pixel red border.\nIf you are using ObjectNet, please cite our work, the citation appears at the bottom of this page. Any derivative of ObjectNet must contain attribution as well."}
2023-12-17T00:54:13+00:00
[]
[]
TAGS #task_categories-image-classification #size_categories-10K<n<100K #license-other #region-us
# ObjectNet A webp (lossless) encoded version of ObjectNet-1.0 at original resolution. ## License / Usage Terms ObjectNet is free to use for both research and commercial applications. The authors own the source images and allow their use under a license derived from Creative Commons Attribution 4.0 with only two additional clauses. 1. ObjectNet may never be used to tune the parameters of any model. 2. Any individual images from ObjectNet may only be posted to the web including their 1 pixel red border. If you are using ObjectNet, please cite our work, the citation appears at the bottom of this page. Any derivative of ObjectNet must contain attribution as well. ## About What is ObjectNet? * A new kind of vision dataset borrowing the idea of controls from other areas of science. * No training set, only a test set! Put your vision system through its paces. * Collected to intentionally show objects from new viewpoints on new backgrounds. * 50,000 image test set, same as ImageNet, with controls for rotation, background, and viewpoint. * 313 object classes with 113 overlapping ImageNet * Large performance drop, what you can expect from vision systems in the real world! * Robust to fine-tuning and a very difficult transfer learning problem ## Why the Red Borders / How do I recognize if an image is in ObjectNet? As training sets become huge, the risk that test and training sets overlap is serious. We provide ObjectNet with a 2 pixel red border around each image which must be removed before performing inference. The ObjectNet license requires that if you post images from ObjectNet to the web, you include this border. Any time you see an image with a solid 2 pixel red border, that's an indication it's in someone's test set and you should be careful about training on it. Reverse image search will allow you to figure out which test set it is from. NOTE: original ObjectNet PNG files actually have a 2 pixel red border while their descriptions say 1. ## Preprocessing Steps for This timm Version 1. Re-encode PNG images with lossless WebP (~32% reduction in size), keeping red border. 2. Add 'imagenet_labels' and 'imagenet_synsets' consisting of lists of ImageNet-1k classes that overlap with ObjectNet class.
[ "# ObjectNet\n\nA webp (lossless) encoded version of ObjectNet-1.0 at original resolution.", "## License / Usage Terms\n\nObjectNet is free to use for both research and commercial applications. The authors own the source images and allow their use under a license derived from Creative Commons Attribution 4.0 with only two additional clauses.\n\n1. ObjectNet may never be used to tune the parameters of any model.\n2. Any individual images from ObjectNet may only be posted to the web including their 1 pixel red border.\nIf you are using ObjectNet, please cite our work, the citation appears at the bottom of this page. Any derivative of ObjectNet must contain attribution as well.", "## About\n\nWhat is ObjectNet?\n* A new kind of vision dataset borrowing the idea of controls from other areas of science.\n* No training set, only a test set! Put your vision system through its paces.\n* Collected to intentionally show objects from new viewpoints on new backgrounds.\n* 50,000 image test set, same as ImageNet, with controls for rotation, background, and viewpoint.\n* 313 object classes with 113 overlapping ImageNet\n* Large performance drop, what you can expect from vision systems in the real world!\n* Robust to fine-tuning and a very difficult transfer learning problem", "## Why the Red Borders / How do I recognize if an image is in ObjectNet?\n\nAs training sets become huge, the risk that test and training sets overlap is serious. We provide ObjectNet with a 2 pixel red border around each image which must be removed before performing inference. The ObjectNet license requires that if you post images from ObjectNet to the web, you include this border. Any time you see an image with a solid 2 pixel red border, that's an indication it's in someone's test set and you should be careful about training on it. Reverse image search will allow you to figure out which test set it is from.\n\nNOTE: original ObjectNet PNG files actually have a 2 pixel red border while their descriptions say 1.", "## Preprocessing Steps for This timm Version\n1. Re-encode PNG images with lossless WebP (~32% reduction in size), keeping red border.\n2. Add 'imagenet_labels' and 'imagenet_synsets' consisting of lists of ImageNet-1k classes that overlap with ObjectNet class." ]
[ "TAGS\n#task_categories-image-classification #size_categories-10K<n<100K #license-other #region-us \n", "# ObjectNet\n\nA webp (lossless) encoded version of ObjectNet-1.0 at original resolution.", "## License / Usage Terms\n\nObjectNet is free to use for both research and commercial applications. The authors own the source images and allow their use under a license derived from Creative Commons Attribution 4.0 with only two additional clauses.\n\n1. ObjectNet may never be used to tune the parameters of any model.\n2. Any individual images from ObjectNet may only be posted to the web including their 1 pixel red border.\nIf you are using ObjectNet, please cite our work, the citation appears at the bottom of this page. Any derivative of ObjectNet must contain attribution as well.", "## About\n\nWhat is ObjectNet?\n* A new kind of vision dataset borrowing the idea of controls from other areas of science.\n* No training set, only a test set! Put your vision system through its paces.\n* Collected to intentionally show objects from new viewpoints on new backgrounds.\n* 50,000 image test set, same as ImageNet, with controls for rotation, background, and viewpoint.\n* 313 object classes with 113 overlapping ImageNet\n* Large performance drop, what you can expect from vision systems in the real world!\n* Robust to fine-tuning and a very difficult transfer learning problem", "## Why the Red Borders / How do I recognize if an image is in ObjectNet?\n\nAs training sets become huge, the risk that test and training sets overlap is serious. We provide ObjectNet with a 2 pixel red border around each image which must be removed before performing inference. The ObjectNet license requires that if you post images from ObjectNet to the web, you include this border. Any time you see an image with a solid 2 pixel red border, that's an indication it's in someone's test set and you should be careful about training on it. Reverse image search will allow you to figure out which test set it is from.\n\nNOTE: original ObjectNet PNG files actually have a 2 pixel red border while their descriptions say 1.", "## Preprocessing Steps for This timm Version\n1. Re-encode PNG images with lossless WebP (~32% reduction in size), keeping red border.\n2. Add 'imagenet_labels' and 'imagenet_synsets' consisting of lists of ImageNet-1k classes that overlap with ObjectNet class." ]
[ 34, 24, 120, 136, 162, 74 ]
[ "passage: TAGS\n#task_categories-image-classification #size_categories-10K<n<100K #license-other #region-us \n# ObjectNet\n\nA webp (lossless) encoded version of ObjectNet-1.0 at original resolution.## License / Usage Terms\n\nObjectNet is free to use for both research and commercial applications. The authors own the source images and allow their use under a license derived from Creative Commons Attribution 4.0 with only two additional clauses.\n\n1. ObjectNet may never be used to tune the parameters of any model.\n2. Any individual images from ObjectNet may only be posted to the web including their 1 pixel red border.\nIf you are using ObjectNet, please cite our work, the citation appears at the bottom of this page. Any derivative of ObjectNet must contain attribution as well.## About\n\nWhat is ObjectNet?\n* A new kind of vision dataset borrowing the idea of controls from other areas of science.\n* No training set, only a test set! Put your vision system through its paces.\n* Collected to intentionally show objects from new viewpoints on new backgrounds.\n* 50,000 image test set, same as ImageNet, with controls for rotation, background, and viewpoint.\n* 313 object classes with 113 overlapping ImageNet\n* Large performance drop, what you can expect from vision systems in the real world!\n* Robust to fine-tuning and a very difficult transfer learning problem## Why the Red Borders / How do I recognize if an image is in ObjectNet?\n\nAs training sets become huge, the risk that test and training sets overlap is serious. We provide ObjectNet with a 2 pixel red border around each image which must be removed before performing inference. The ObjectNet license requires that if you post images from ObjectNet to the web, you include this border. Any time you see an image with a solid 2 pixel red border, that's an indication it's in someone's test set and you should be careful about training on it. Reverse image search will allow you to figure out which test set it is from.\n\nNOTE: original ObjectNet PNG files actually have a 2 pixel red border while their descriptions say 1." ]
52ba19b7243052ecae65acda15329425c9b5ed57
# Dataset Card for Evaluation run of jan-hq/trinity-v1 <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [jan-hq/trinity-v1](https://huggingface.co/jan-hq/trinity-v1) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_jan-hq__trinity-v1", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-12-16T19:24:08.553660](https://huggingface.co/datasets/open-llm-leaderboard/details_jan-hq__trinity-v1/blob/main/results_2023-12-16T19-24-08.553660.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.6575877329335247, "acc_stderr": 0.031985421208388404, "acc_norm": 0.6571647268300141, "acc_norm_stderr": 0.032648337921958155, "mc1": 0.5507955936352509, "mc1_stderr": 0.01741294198611529, "mc2": 0.6931209356367747, "mc2_stderr": 0.015031530031665238 }, "harness|arc:challenge|25": { "acc": 0.6988054607508533, "acc_stderr": 0.013406741767847632, "acc_norm": 0.7226962457337884, "acc_norm_stderr": 0.013082095839059376 }, "harness|hellaswag|10": { "acc": 0.711113324039036, "acc_stderr": 0.004523188431142894, "acc_norm": 0.8835889265086636, "acc_norm_stderr": 0.0032006176493464752 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.33, "acc_stderr": 0.04725815626252605, "acc_norm": 0.33, "acc_norm_stderr": 0.04725815626252605 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.6592592592592592, "acc_stderr": 0.040943762699967926, "acc_norm": 0.6592592592592592, "acc_norm_stderr": 0.040943762699967926 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.6907894736842105, "acc_stderr": 0.037610708698674805, "acc_norm": 0.6907894736842105, "acc_norm_stderr": 0.037610708698674805 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.62, "acc_stderr": 0.048783173121456316, "acc_norm": 0.62, "acc_norm_stderr": 0.048783173121456316 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.720754716981132, "acc_stderr": 0.027611163402399715, "acc_norm": 0.720754716981132, "acc_norm_stderr": 0.027611163402399715 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.7708333333333334, "acc_stderr": 0.03514697467862388, "acc_norm": 0.7708333333333334, "acc_norm_stderr": 0.03514697467862388 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.47, "acc_stderr": 0.050161355804659205, "acc_norm": 0.47, "acc_norm_stderr": 0.050161355804659205 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.56, "acc_stderr": 0.049888765156985884, "acc_norm": 0.56, "acc_norm_stderr": 0.049888765156985884 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.31, "acc_stderr": 
0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.6994219653179191, "acc_stderr": 0.03496101481191179, "acc_norm": 0.6994219653179191, "acc_norm_stderr": 0.03496101481191179 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.46078431372549017, "acc_stderr": 0.04959859966384181, "acc_norm": 0.46078431372549017, "acc_norm_stderr": 0.04959859966384181 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.77, "acc_stderr": 0.042295258468165065, "acc_norm": 0.77, "acc_norm_stderr": 0.042295258468165065 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.5787234042553191, "acc_stderr": 0.03227834510146268, "acc_norm": 0.5787234042553191, "acc_norm_stderr": 0.03227834510146268 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.5087719298245614, "acc_stderr": 0.04702880432049615, "acc_norm": 0.5087719298245614, "acc_norm_stderr": 0.04702880432049615 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5655172413793104, "acc_stderr": 0.04130740879555498, "acc_norm": 0.5655172413793104, "acc_norm_stderr": 0.04130740879555498 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.42328042328042326, "acc_stderr": 0.02544636563440678, "acc_norm": 0.42328042328042326, "acc_norm_stderr": 0.02544636563440678 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.46825396825396826, "acc_stderr": 0.04463112720677172, "acc_norm": 0.46825396825396826, "acc_norm_stderr": 0.04463112720677172 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.33, "acc_stderr": 0.04725815626252604, "acc_norm": 0.33, "acc_norm_stderr": 0.04725815626252604 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.7806451612903226, "acc_stderr": 0.023540799358723295, "acc_norm": 0.7806451612903226, "acc_norm_stderr": 0.023540799358723295 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.5320197044334976, "acc_stderr": 0.03510766597959215, "acc_norm": 0.5320197044334976, "acc_norm_stderr": 0.03510766597959215 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.7, "acc_stderr": 0.046056618647183814, "acc_norm": 0.7, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.7757575757575758, "acc_stderr": 0.03256866661681102, "acc_norm": 0.7757575757575758, "acc_norm_stderr": 0.03256866661681102 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.7878787878787878, "acc_stderr": 0.029126522834586815, "acc_norm": 0.7878787878787878, "acc_norm_stderr": 0.029126522834586815 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.9015544041450777, "acc_stderr": 0.02150024957603348, "acc_norm": 0.9015544041450777, "acc_norm_stderr": 0.02150024957603348 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.6666666666666666, "acc_stderr": 0.023901157979402538, "acc_norm": 0.6666666666666666, "acc_norm_stderr": 0.023901157979402538 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.34074074074074073, "acc_stderr": 0.028897748741131154, "acc_norm": 0.34074074074074073, "acc_norm_stderr": 0.028897748741131154 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.6722689075630253, "acc_stderr": 0.03048991141767323, "acc_norm": 0.6722689075630253, "acc_norm_stderr": 0.03048991141767323 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.3509933774834437, "acc_stderr": 0.03896981964257375, "acc_norm": 0.3509933774834437, "acc_norm_stderr": 
0.03896981964257375 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8532110091743119, "acc_stderr": 0.01517314184512625, "acc_norm": 0.8532110091743119, "acc_norm_stderr": 0.01517314184512625 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.5416666666666666, "acc_stderr": 0.03398110890294636, "acc_norm": 0.5416666666666666, "acc_norm_stderr": 0.03398110890294636 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.8333333333333334, "acc_stderr": 0.026156867523931045, "acc_norm": 0.8333333333333334, "acc_norm_stderr": 0.026156867523931045 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.8059071729957806, "acc_stderr": 0.025744902532290916, "acc_norm": 0.8059071729957806, "acc_norm_stderr": 0.025744902532290916 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.6905829596412556, "acc_stderr": 0.031024411740572213, "acc_norm": 0.6905829596412556, "acc_norm_stderr": 0.031024411740572213 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.8244274809160306, "acc_stderr": 0.033368203384760736, "acc_norm": 0.8244274809160306, "acc_norm_stderr": 0.033368203384760736 }, "harness|hendrycksTest-international_law|5": { "acc": 0.8016528925619835, "acc_stderr": 0.03640118271990947, "acc_norm": 0.8016528925619835, "acc_norm_stderr": 0.03640118271990947 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.7777777777777778, "acc_stderr": 0.040191074725573483, "acc_norm": 0.7777777777777778, "acc_norm_stderr": 0.040191074725573483 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.7791411042944786, "acc_stderr": 0.03259177392742179, "acc_norm": 0.7791411042944786, "acc_norm_stderr": 0.03259177392742179 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.41964285714285715, "acc_stderr": 0.046840993210771065, "acc_norm": 0.41964285714285715, "acc_norm_stderr": 0.046840993210771065 }, "harness|hendrycksTest-management|5": { "acc": 0.7572815533980582, "acc_stderr": 0.04245022486384495, "acc_norm": 0.7572815533980582, "acc_norm_stderr": 0.04245022486384495 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8803418803418803, "acc_stderr": 0.021262719400406957, "acc_norm": 0.8803418803418803, "acc_norm_stderr": 0.021262719400406957 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.71, "acc_stderr": 0.045604802157206845, "acc_norm": 0.71, "acc_norm_stderr": 0.045604802157206845 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.8352490421455939, "acc_stderr": 0.013265346261323797, "acc_norm": 0.8352490421455939, "acc_norm_stderr": 0.013265346261323797 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.7398843930635838, "acc_stderr": 0.023618678310069356, "acc_norm": 0.7398843930635838, "acc_norm_stderr": 0.023618678310069356 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.4759776536312849, "acc_stderr": 0.016703190189300186, "acc_norm": 0.4759776536312849, "acc_norm_stderr": 0.016703190189300186 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.7222222222222222, "acc_stderr": 0.025646863097137897, "acc_norm": 0.7222222222222222, "acc_norm_stderr": 0.025646863097137897 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.7170418006430869, "acc_stderr": 0.02558306248998481, "acc_norm": 0.7170418006430869, "acc_norm_stderr": 0.02558306248998481 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.7469135802469136, "acc_stderr": 0.024191808600712995, "acc_norm": 0.7469135802469136, "acc_norm_stderr": 0.024191808600712995 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.48936170212765956, 
"acc_stderr": 0.029820747191422473, "acc_norm": 0.48936170212765956, "acc_norm_stderr": 0.029820747191422473 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.47392438070404175, "acc_stderr": 0.012752858346533131, "acc_norm": 0.47392438070404175, "acc_norm_stderr": 0.012752858346533131 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.6875, "acc_stderr": 0.02815637344037142, "acc_norm": 0.6875, "acc_norm_stderr": 0.02815637344037142 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.6715686274509803, "acc_stderr": 0.018999707383162673, "acc_norm": 0.6715686274509803, "acc_norm_stderr": 0.018999707383162673 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6909090909090909, "acc_stderr": 0.044262946482000985, "acc_norm": 0.6909090909090909, "acc_norm_stderr": 0.044262946482000985 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.7224489795918367, "acc_stderr": 0.028666857790274648, "acc_norm": 0.7224489795918367, "acc_norm_stderr": 0.028666857790274648 }, "harness|hendrycksTest-sociology|5": { "acc": 0.8407960199004975, "acc_stderr": 0.025870646766169136, "acc_norm": 0.8407960199004975, "acc_norm_stderr": 0.025870646766169136 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.86, "acc_stderr": 0.0348735088019777, "acc_norm": 0.86, "acc_norm_stderr": 0.0348735088019777 }, "harness|hendrycksTest-virology|5": { "acc": 0.5481927710843374, "acc_stderr": 0.03874371556587953, "acc_norm": 0.5481927710843374, "acc_norm_stderr": 0.03874371556587953 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8245614035087719, "acc_stderr": 0.029170885500727665, "acc_norm": 0.8245614035087719, "acc_norm_stderr": 0.029170885500727665 }, "harness|truthfulqa:mc|0": { "mc1": 0.5507955936352509, "mc1_stderr": 0.01741294198611529, "mc2": 0.6931209356367747, "mc2_stderr": 0.015031530031665238 }, "harness|winogrande|5": { "acc": 0.8200473559589582, "acc_stderr": 0.01079646868806868 }, "harness|gsm8k|5": { "acc": 0.7164518574677786, "acc_stderr": 0.012415070917508124 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). 
--> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
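As a complement to the per-task loading snippet in the summary above, the aggregated numbers shown under "Latest results" should also be retrievable from the "results" configuration. A hedged sketch, assuming that configuration exposes the same "latest" split naming as the per-task configurations:

```python
from datasets import load_dataset

# "latest" always points at the most recent run; the dated split name
# (e.g. "2023_12_16T19_24_08.553660") selects a specific run instead.
results = load_dataset(
    "open-llm-leaderboard/details_jan-hq__trinity-v1",
    "results",
    split="latest",
)
print(results[0])  # should contain the aggregated metrics reported above
```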
open-llm-leaderboard/details_jan-hq__trinity-v1
[ "region:us" ]
2023-12-16T19:27:00+00:00
{"pretty_name": "Evaluation run of jan-hq/trinity-v1", "dataset_summary": "Dataset automatically created during the evaluation run of model [jan-hq/trinity-v1](https://huggingface.co/jan-hq/trinity-v1) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_jan-hq__trinity-v1\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-12-16T19:24:08.553660](https://huggingface.co/datasets/open-llm-leaderboard/details_jan-hq__trinity-v1/blob/main/results_2023-12-16T19-24-08.553660.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6575877329335247,\n \"acc_stderr\": 0.031985421208388404,\n \"acc_norm\": 0.6571647268300141,\n \"acc_norm_stderr\": 0.032648337921958155,\n \"mc1\": 0.5507955936352509,\n \"mc1_stderr\": 0.01741294198611529,\n \"mc2\": 0.6931209356367747,\n \"mc2_stderr\": 0.015031530031665238\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.6988054607508533,\n \"acc_stderr\": 0.013406741767847632,\n \"acc_norm\": 0.7226962457337884,\n \"acc_norm_stderr\": 0.013082095839059376\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.711113324039036,\n \"acc_stderr\": 0.004523188431142894,\n \"acc_norm\": 0.8835889265086636,\n \"acc_norm_stderr\": 0.0032006176493464752\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.33,\n \"acc_stderr\": 0.04725815626252605,\n \"acc_norm\": 0.33,\n \"acc_norm_stderr\": 0.04725815626252605\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6592592592592592,\n \"acc_stderr\": 0.040943762699967926,\n \"acc_norm\": 0.6592592592592592,\n \"acc_norm_stderr\": 0.040943762699967926\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.6907894736842105,\n \"acc_stderr\": 0.037610708698674805,\n \"acc_norm\": 0.6907894736842105,\n \"acc_norm_stderr\": 0.037610708698674805\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.62,\n \"acc_stderr\": 0.048783173121456316,\n \"acc_norm\": 0.62,\n \"acc_norm_stderr\": 0.048783173121456316\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.720754716981132,\n \"acc_stderr\": 0.027611163402399715,\n \"acc_norm\": 0.720754716981132,\n \"acc_norm_stderr\": 0.027611163402399715\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7708333333333334,\n \"acc_stderr\": 0.03514697467862388,\n \"acc_norm\": 0.7708333333333334,\n \"acc_norm_stderr\": 0.03514697467862388\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.47,\n \"acc_stderr\": 0.050161355804659205,\n \"acc_norm\": 0.47,\n \"acc_norm_stderr\": 
0.050161355804659205\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.56,\n \"acc_stderr\": 0.049888765156985884,\n \"acc_norm\": 0.56,\n \"acc_norm_stderr\": 0.049888765156985884\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.31,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.31,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6994219653179191,\n \"acc_stderr\": 0.03496101481191179,\n \"acc_norm\": 0.6994219653179191,\n \"acc_norm_stderr\": 0.03496101481191179\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.46078431372549017,\n \"acc_stderr\": 0.04959859966384181,\n \"acc_norm\": 0.46078431372549017,\n \"acc_norm_stderr\": 0.04959859966384181\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.77,\n \"acc_stderr\": 0.042295258468165065,\n \"acc_norm\": 0.77,\n \"acc_norm_stderr\": 0.042295258468165065\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.5787234042553191,\n \"acc_stderr\": 0.03227834510146268,\n \"acc_norm\": 0.5787234042553191,\n \"acc_norm_stderr\": 0.03227834510146268\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.5087719298245614,\n \"acc_stderr\": 0.04702880432049615,\n \"acc_norm\": 0.5087719298245614,\n \"acc_norm_stderr\": 0.04702880432049615\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5655172413793104,\n \"acc_stderr\": 0.04130740879555498,\n \"acc_norm\": 0.5655172413793104,\n \"acc_norm_stderr\": 0.04130740879555498\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.42328042328042326,\n \"acc_stderr\": 0.02544636563440678,\n \"acc_norm\": 0.42328042328042326,\n \"acc_norm_stderr\": 0.02544636563440678\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.46825396825396826,\n \"acc_stderr\": 0.04463112720677172,\n \"acc_norm\": 0.46825396825396826,\n \"acc_norm_stderr\": 0.04463112720677172\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.33,\n \"acc_stderr\": 0.04725815626252604,\n \"acc_norm\": 0.33,\n \"acc_norm_stderr\": 0.04725815626252604\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7806451612903226,\n \"acc_stderr\": 0.023540799358723295,\n \"acc_norm\": 0.7806451612903226,\n \"acc_norm_stderr\": 0.023540799358723295\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.5320197044334976,\n \"acc_stderr\": 0.03510766597959215,\n \"acc_norm\": 0.5320197044334976,\n \"acc_norm_stderr\": 0.03510766597959215\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.7,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.7,\n \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7757575757575758,\n \"acc_stderr\": 0.03256866661681102,\n \"acc_norm\": 0.7757575757575758,\n \"acc_norm_stderr\": 0.03256866661681102\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.7878787878787878,\n \"acc_stderr\": 0.029126522834586815,\n \"acc_norm\": 0.7878787878787878,\n \"acc_norm_stderr\": 0.029126522834586815\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.9015544041450777,\n \"acc_stderr\": 0.02150024957603348,\n \"acc_norm\": 0.9015544041450777,\n \"acc_norm_stderr\": 0.02150024957603348\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.6666666666666666,\n \"acc_stderr\": 
0.023901157979402538,\n \"acc_norm\": 0.6666666666666666,\n \"acc_norm_stderr\": 0.023901157979402538\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.34074074074074073,\n \"acc_stderr\": 0.028897748741131154,\n \"acc_norm\": 0.34074074074074073,\n \"acc_norm_stderr\": 0.028897748741131154\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.6722689075630253,\n \"acc_stderr\": 0.03048991141767323,\n \"acc_norm\": 0.6722689075630253,\n \"acc_norm_stderr\": 0.03048991141767323\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.3509933774834437,\n \"acc_stderr\": 0.03896981964257375,\n \"acc_norm\": 0.3509933774834437,\n \"acc_norm_stderr\": 0.03896981964257375\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8532110091743119,\n \"acc_stderr\": 0.01517314184512625,\n \"acc_norm\": 0.8532110091743119,\n \"acc_norm_stderr\": 0.01517314184512625\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.5416666666666666,\n \"acc_stderr\": 0.03398110890294636,\n \"acc_norm\": 0.5416666666666666,\n \"acc_norm_stderr\": 0.03398110890294636\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.8333333333333334,\n \"acc_stderr\": 0.026156867523931045,\n \"acc_norm\": 0.8333333333333334,\n \"acc_norm_stderr\": 0.026156867523931045\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.8059071729957806,\n \"acc_stderr\": 0.025744902532290916,\n \"acc_norm\": 0.8059071729957806,\n \"acc_norm_stderr\": 0.025744902532290916\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6905829596412556,\n \"acc_stderr\": 0.031024411740572213,\n \"acc_norm\": 0.6905829596412556,\n \"acc_norm_stderr\": 0.031024411740572213\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.8244274809160306,\n \"acc_stderr\": 0.033368203384760736,\n \"acc_norm\": 0.8244274809160306,\n \"acc_norm_stderr\": 0.033368203384760736\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.8016528925619835,\n \"acc_stderr\": 0.03640118271990947,\n \"acc_norm\": 0.8016528925619835,\n \"acc_norm_stderr\": 0.03640118271990947\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7777777777777778,\n \"acc_stderr\": 0.040191074725573483,\n \"acc_norm\": 0.7777777777777778,\n \"acc_norm_stderr\": 0.040191074725573483\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7791411042944786,\n \"acc_stderr\": 0.03259177392742179,\n \"acc_norm\": 0.7791411042944786,\n \"acc_norm_stderr\": 0.03259177392742179\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.41964285714285715,\n \"acc_stderr\": 0.046840993210771065,\n \"acc_norm\": 0.41964285714285715,\n \"acc_norm_stderr\": 0.046840993210771065\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7572815533980582,\n \"acc_stderr\": 0.04245022486384495,\n \"acc_norm\": 0.7572815533980582,\n \"acc_norm_stderr\": 0.04245022486384495\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8803418803418803,\n \"acc_stderr\": 0.021262719400406957,\n \"acc_norm\": 0.8803418803418803,\n \"acc_norm_stderr\": 0.021262719400406957\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.71,\n \"acc_stderr\": 0.045604802157206845,\n \"acc_norm\": 0.71,\n \"acc_norm_stderr\": 0.045604802157206845\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.8352490421455939,\n \"acc_stderr\": 0.013265346261323797,\n \"acc_norm\": 0.8352490421455939,\n 
\"acc_norm_stderr\": 0.013265346261323797\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.7398843930635838,\n \"acc_stderr\": 0.023618678310069356,\n \"acc_norm\": 0.7398843930635838,\n \"acc_norm_stderr\": 0.023618678310069356\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.4759776536312849,\n \"acc_stderr\": 0.016703190189300186,\n \"acc_norm\": 0.4759776536312849,\n \"acc_norm_stderr\": 0.016703190189300186\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.7222222222222222,\n \"acc_stderr\": 0.025646863097137897,\n \"acc_norm\": 0.7222222222222222,\n \"acc_norm_stderr\": 0.025646863097137897\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.7170418006430869,\n \"acc_stderr\": 0.02558306248998481,\n \"acc_norm\": 0.7170418006430869,\n \"acc_norm_stderr\": 0.02558306248998481\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.7469135802469136,\n \"acc_stderr\": 0.024191808600712995,\n \"acc_norm\": 0.7469135802469136,\n \"acc_norm_stderr\": 0.024191808600712995\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.48936170212765956,\n \"acc_stderr\": 0.029820747191422473,\n \"acc_norm\": 0.48936170212765956,\n \"acc_norm_stderr\": 0.029820747191422473\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.47392438070404175,\n \"acc_stderr\": 0.012752858346533131,\n \"acc_norm\": 0.47392438070404175,\n \"acc_norm_stderr\": 0.012752858346533131\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.6875,\n \"acc_stderr\": 0.02815637344037142,\n \"acc_norm\": 0.6875,\n \"acc_norm_stderr\": 0.02815637344037142\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6715686274509803,\n \"acc_stderr\": 0.018999707383162673,\n \"acc_norm\": 0.6715686274509803,\n \"acc_norm_stderr\": 0.018999707383162673\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6909090909090909,\n \"acc_stderr\": 0.044262946482000985,\n \"acc_norm\": 0.6909090909090909,\n \"acc_norm_stderr\": 0.044262946482000985\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.7224489795918367,\n \"acc_stderr\": 0.028666857790274648,\n \"acc_norm\": 0.7224489795918367,\n \"acc_norm_stderr\": 0.028666857790274648\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8407960199004975,\n \"acc_stderr\": 0.025870646766169136,\n \"acc_norm\": 0.8407960199004975,\n \"acc_norm_stderr\": 0.025870646766169136\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.86,\n \"acc_stderr\": 0.0348735088019777,\n \"acc_norm\": 0.86,\n \"acc_norm_stderr\": 0.0348735088019777\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5481927710843374,\n \"acc_stderr\": 0.03874371556587953,\n \"acc_norm\": 0.5481927710843374,\n \"acc_norm_stderr\": 0.03874371556587953\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8245614035087719,\n \"acc_stderr\": 0.029170885500727665,\n \"acc_norm\": 0.8245614035087719,\n \"acc_norm_stderr\": 0.029170885500727665\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.5507955936352509,\n \"mc1_stderr\": 0.01741294198611529,\n \"mc2\": 0.6931209356367747,\n \"mc2_stderr\": 0.015031530031665238\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.8200473559589582,\n \"acc_stderr\": 0.01079646868806868\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.7164518574677786,\n \"acc_stderr\": 0.012415070917508124\n }\n}\n```", "repo_url": "https://huggingface.co/jan-hq/trinity-v1", "leaderboard_url": 
"https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_12_16T19_24_08.553660", "path": ["**/details_harness|arc:challenge|25_2023-12-16T19-24-08.553660.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-12-16T19-24-08.553660.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_12_16T19_24_08.553660", "path": ["**/details_harness|gsm8k|5_2023-12-16T19-24-08.553660.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-12-16T19-24-08.553660.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_12_16T19_24_08.553660", "path": ["**/details_harness|hellaswag|10_2023-12-16T19-24-08.553660.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-12-16T19-24-08.553660.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_12_16T19_24_08.553660", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T19-24-08.553660.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-12-16T19-24-08.553660.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-12-16T19-24-08.553660.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T19-24-08.553660.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T19-24-08.553660.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-12-16T19-24-08.553660.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T19-24-08.553660.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T19-24-08.553660.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T19-24-08.553660.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T19-24-08.553660.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-12-16T19-24-08.553660.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-12-16T19-24-08.553660.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T19-24-08.553660.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-12-16T19-24-08.553660.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T19-24-08.553660.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T19-24-08.553660.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T19-24-08.553660.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-12-16T19-24-08.553660.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T19-24-08.553660.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T19-24-08.553660.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T19-24-08.553660.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T19-24-08.553660.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T19-24-08.553660.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T19-24-08.553660.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T19-24-08.553660.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T19-24-08.553660.parquet", 
"**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T19-24-08.553660.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T19-24-08.553660.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T19-24-08.553660.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T19-24-08.553660.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T19-24-08.553660.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T19-24-08.553660.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-12-16T19-24-08.553660.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T19-24-08.553660.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-12-16T19-24-08.553660.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T19-24-08.553660.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T19-24-08.553660.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T19-24-08.553660.parquet", "**/details_harness|hendrycksTest-management|5_2023-12-16T19-24-08.553660.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-12-16T19-24-08.553660.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T19-24-08.553660.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T19-24-08.553660.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T19-24-08.553660.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T19-24-08.553660.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-12-16T19-24-08.553660.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-12-16T19-24-08.553660.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-12-16T19-24-08.553660.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T19-24-08.553660.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-12-16T19-24-08.553660.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T19-24-08.553660.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T19-24-08.553660.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-12-16T19-24-08.553660.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-12-16T19-24-08.553660.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-12-16T19-24-08.553660.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T19-24-08.553660.parquet", "**/details_harness|hendrycksTest-virology|5_2023-12-16T19-24-08.553660.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-12-16T19-24-08.553660.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T19-24-08.553660.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-12-16T19-24-08.553660.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-12-16T19-24-08.553660.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T19-24-08.553660.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T19-24-08.553660.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-12-16T19-24-08.553660.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T19-24-08.553660.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T19-24-08.553660.parquet", 
"**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T19-24-08.553660.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T19-24-08.553660.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-12-16T19-24-08.553660.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-12-16T19-24-08.553660.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T19-24-08.553660.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-12-16T19-24-08.553660.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T19-24-08.553660.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T19-24-08.553660.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T19-24-08.553660.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-12-16T19-24-08.553660.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T19-24-08.553660.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T19-24-08.553660.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T19-24-08.553660.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T19-24-08.553660.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T19-24-08.553660.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T19-24-08.553660.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T19-24-08.553660.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T19-24-08.553660.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T19-24-08.553660.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T19-24-08.553660.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T19-24-08.553660.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T19-24-08.553660.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T19-24-08.553660.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T19-24-08.553660.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-12-16T19-24-08.553660.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T19-24-08.553660.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-12-16T19-24-08.553660.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T19-24-08.553660.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T19-24-08.553660.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T19-24-08.553660.parquet", "**/details_harness|hendrycksTest-management|5_2023-12-16T19-24-08.553660.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-12-16T19-24-08.553660.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T19-24-08.553660.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T19-24-08.553660.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T19-24-08.553660.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T19-24-08.553660.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-12-16T19-24-08.553660.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-12-16T19-24-08.553660.parquet", 
"**/details_harness|hendrycksTest-prehistory|5_2023-12-16T19-24-08.553660.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T19-24-08.553660.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-12-16T19-24-08.553660.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T19-24-08.553660.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T19-24-08.553660.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-12-16T19-24-08.553660.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-12-16T19-24-08.553660.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-12-16T19-24-08.553660.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T19-24-08.553660.parquet", "**/details_harness|hendrycksTest-virology|5_2023-12-16T19-24-08.553660.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-12-16T19-24-08.553660.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_12_16T19_24_08.553660", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T19-24-08.553660.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T19-24-08.553660.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_12_16T19_24_08.553660", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-12-16T19-24-08.553660.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-12-16T19-24-08.553660.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_12_16T19_24_08.553660", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-12-16T19-24-08.553660.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-12-16T19-24-08.553660.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_12_16T19_24_08.553660", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T19-24-08.553660.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T19-24-08.553660.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_12_16T19_24_08.553660", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T19-24-08.553660.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T19-24-08.553660.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_12_16T19_24_08.553660", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-12-16T19-24-08.553660.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-12-16T19-24-08.553660.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_12_16T19_24_08.553660", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T19-24-08.553660.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T19-24-08.553660.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_12_16T19_24_08.553660", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T19-24-08.553660.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T19-24-08.553660.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_12_16T19_24_08.553660", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T19-24-08.553660.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T19-24-08.553660.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_12_16T19_24_08.553660", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T19-24-08.553660.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T19-24-08.553660.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_12_16T19_24_08.553660", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-12-16T19-24-08.553660.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-12-16T19-24-08.553660.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_12_16T19_24_08.553660", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-12-16T19-24-08.553660.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-12-16T19-24-08.553660.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_12_16T19_24_08.553660", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T19-24-08.553660.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T19-24-08.553660.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_12_16T19_24_08.553660", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-12-16T19-24-08.553660.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-12-16T19-24-08.553660.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_12_16T19_24_08.553660", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T19-24-08.553660.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T19-24-08.553660.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_12_16T19_24_08.553660", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T19-24-08.553660.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T19-24-08.553660.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_12_16T19_24_08.553660", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T19-24-08.553660.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T19-24-08.553660.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_12_16T19_24_08.553660", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-12-16T19-24-08.553660.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-12-16T19-24-08.553660.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_12_16T19_24_08.553660", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T19-24-08.553660.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T19-24-08.553660.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_12_16T19_24_08.553660", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T19-24-08.553660.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T19-24-08.553660.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_12_16T19_24_08.553660", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T19-24-08.553660.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T19-24-08.553660.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_12_16T19_24_08.553660", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T19-24-08.553660.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T19-24-08.553660.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_12_16T19_24_08.553660", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T19-24-08.553660.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T19-24-08.553660.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_12_16T19_24_08.553660", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T19-24-08.553660.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T19-24-08.553660.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_12_16T19_24_08.553660", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T19-24-08.553660.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T19-24-08.553660.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_12_16T19_24_08.553660", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T19-24-08.553660.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T19-24-08.553660.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_12_16T19_24_08.553660", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T19-24-08.553660.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T19-24-08.553660.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_12_16T19_24_08.553660", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T19-24-08.553660.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T19-24-08.553660.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_12_16T19_24_08.553660", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T19-24-08.553660.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T19-24-08.553660.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_12_16T19_24_08.553660", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T19-24-08.553660.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T19-24-08.553660.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_12_16T19_24_08.553660", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T19-24-08.553660.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T19-24-08.553660.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_12_16T19_24_08.553660", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T19-24-08.553660.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T19-24-08.553660.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_12_16T19_24_08.553660", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-12-16T19-24-08.553660.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-12-16T19-24-08.553660.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_12_16T19_24_08.553660", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T19-24-08.553660.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T19-24-08.553660.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_12_16T19_24_08.553660", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-12-16T19-24-08.553660.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-12-16T19-24-08.553660.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_12_16T19_24_08.553660", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T19-24-08.553660.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T19-24-08.553660.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_12_16T19_24_08.553660", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T19-24-08.553660.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T19-24-08.553660.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_12_16T19_24_08.553660", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T19-24-08.553660.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T19-24-08.553660.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_12_16T19_24_08.553660", "path": ["**/details_harness|hendrycksTest-management|5_2023-12-16T19-24-08.553660.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-12-16T19-24-08.553660.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_12_16T19_24_08.553660", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-12-16T19-24-08.553660.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-12-16T19-24-08.553660.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_12_16T19_24_08.553660", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T19-24-08.553660.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T19-24-08.553660.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_12_16T19_24_08.553660", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T19-24-08.553660.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T19-24-08.553660.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_12_16T19_24_08.553660", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T19-24-08.553660.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T19-24-08.553660.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_12_16T19_24_08.553660", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T19-24-08.553660.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T19-24-08.553660.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_12_16T19_24_08.553660", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-12-16T19-24-08.553660.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-12-16T19-24-08.553660.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_12_16T19_24_08.553660", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-12-16T19-24-08.553660.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-12-16T19-24-08.553660.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_12_16T19_24_08.553660", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-12-16T19-24-08.553660.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-12-16T19-24-08.553660.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_12_16T19_24_08.553660", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T19-24-08.553660.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T19-24-08.553660.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_12_16T19_24_08.553660", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-12-16T19-24-08.553660.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2023-12-16T19-24-08.553660.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_12_16T19_24_08.553660", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T19-24-08.553660.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T19-24-08.553660.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_12_16T19_24_08.553660", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T19-24-08.553660.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T19-24-08.553660.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_12_16T19_24_08.553660", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-12-16T19-24-08.553660.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-12-16T19-24-08.553660.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_12_16T19_24_08.553660", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-12-16T19-24-08.553660.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-12-16T19-24-08.553660.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_12_16T19_24_08.553660", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-12-16T19-24-08.553660.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-12-16T19-24-08.553660.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_12_16T19_24_08.553660", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T19-24-08.553660.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T19-24-08.553660.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_12_16T19_24_08.553660", "path": ["**/details_harness|hendrycksTest-virology|5_2023-12-16T19-24-08.553660.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-12-16T19-24-08.553660.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_12_16T19_24_08.553660", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-12-16T19-24-08.553660.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-12-16T19-24-08.553660.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_12_16T19_24_08.553660", "path": ["**/details_harness|truthfulqa:mc|0_2023-12-16T19-24-08.553660.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-12-16T19-24-08.553660.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_12_16T19_24_08.553660", "path": ["**/details_harness|winogrande|5_2023-12-16T19-24-08.553660.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-12-16T19-24-08.553660.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_12_16T19_24_08.553660", "path": ["results_2023-12-16T19-24-08.553660.parquet"]}, {"split": "latest", "path": 
["results_2023-12-16T19-24-08.553660.parquet"]}]}]}
2023-12-16T19:27:40+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of jan-hq/trinity-v1 Dataset automatically created during the evaluation run of model jan-hq/trinity-v1 on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-12-16T19:24:08.553660 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
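The loading snippet referenced in the card above was dropped during processing. Below is a minimal, hedged sketch of how the "results" configuration and its "latest" split (both listed in this card's config metadata) could be read; the details repository id is an assumption based on the leaderboard's usual `details_<org>__<model>` naming and should be verified on the Open LLM Leaderboard before use.

```python
from datasets import load_dataset

# Sketch only: the repo id below is assumed from the leaderboard's naming convention.
# The "results" config aggregates all metrics of the run; the "latest" split always
# points at the most recent evaluation (here 2023-12-16T19:24:08.553660).
results = load_dataset(
    "open-llm-leaderboard/details_jan-hq__trinity-v1",  # assumed repo id
    "results",
    split="latest",
)
print(results[0])  # one row holding the aggregated acc / stderr values per task
```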
[ "# Dataset Card for Evaluation run of jan-hq/trinity-v1\n\n\n\nDataset automatically created during the evaluation run of model jan-hq/trinity-v1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-12-16T19:24:08.553660(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of jan-hq/trinity-v1\n\n\n\nDataset automatically created during the evaluation run of model jan-hq/trinity-v1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-12-16T19:24:08.553660(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 181, 66, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of jan-hq/trinity-v1\n\n\n\nDataset automatically created during the evaluation run of model jan-hq/trinity-v1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-12-16T19:24:08.553660(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]## Dataset Card Authors [optional]## Dataset Card Contact" ]
0d750c4829bcf2e6419e317be8810f15a9b0b6ec
# Dataset Card for Polish ASR BIGOS corpora ## Table of Contents - [Table of Contents](#table-of-contents) - [Dataset Description](#dataset-description) - [Dataset Summary](#dataset-summary) - [Supported Tasks and Leaderboards](#supported-tasks-and-leaderboards) - [Languages](#languages) - [Dataset Structure](#dataset-structure) - [Data Instances](#data-instances) - [Data Fields](#data-fields) - [Data Splits](#data-splits) - [Dataset Creation](#dataset-creation) - [Curation Rationale](#curation-rationale) - [Source Data](#source-data) - [Annotations](#annotations) - [Personal and Sensitive Information](#personal-and-sensitive-information) - [Considerations for Using the Data](#considerations-for-using-the-data) - [Social Impact of Dataset](#social-impact-of-dataset) - [Discussion of Biases](#discussion-of-biases) - [Other Known Limitations](#other-known-limitations) - [Additional Information](#additional-information) - [Dataset Curators](#dataset-curators) - [Licensing Information](#licensing-information) - [Citation Information](#citation-information) - [Contributions](#contributions) ## Dataset Description - **Homepage:** https://huggingface.co/datasets/amu-cai/pl-asr-bigos-v2 - **Repository:** https://github.com/goodmike31/pl-asr-bigos-tools - **Paper:** https://annals-csis.org/proceedings/2023/drp/1609.html - **Leaderboard:** https://huggingface.co/spaces/michaljunczyk/pl-asr-bigos-benchmark - **Point of Contact:** [email protected] ### Dataset Summary The BIGOS (Benchmark Intended Grouping of Open Speech) corpora aim to simplify access to and use of publicly available ASR speech datasets for Polish.<br> ### Supported Tasks and Leaderboards * Open Polish ASR challenge [PolEval](http://poleval.pl/) using the BIGOS V2 and [PELCRA for BIGOS](https://huggingface.co/datasets/pelcra/pl-asr-pelcra-for-bigos) datasets * Evaluation of 3 commercial and 5 freely available ASR systems on [BIGOS V1](https://huggingface.co/datasets/michaljunczyk/pl-asr-bigos) [(paper)](https://annals-csis.org/proceedings/2023/drp/1609.html). A continuous benchmark and leaderboard of PL ASR systems using the BIGOS corpora is planned for 2024.<br> ### Languages Polish ## Dataset Structure The datasets consist of audio recordings in the WAV format with corresponding metadata.<br> The audio and metadata can be used in a raw format (TSV) or via the Hugging Face datasets library.<br> References for the test split will only become available after the completion of the 23/24 PolEval challenge.<br> ### Data Instances The train set consists of 82 025 samples. The dev set consists of 14 254 samples. The test set consists of 14 993 samples. ### Data Fields Available fields: * `audioname` - file identifier * `split` - test, validation or train split * `dataset` - source dataset identifier * `audio` - binary representation of the audio file * `ref_orig` - original transcription of the audio file * `samplingrate_orig` - sampling rate of the original recording * `sampling_rate` - sampling rate of the recording in the release * `audiopath_bigos` - audio filepath after extraction of the tar.gz archive <br><br> ### Data Splits The train split contains recordings intended for training. The validation split contains recordings for validation during the training procedure. The test split contains recordings intended for evaluation only. References for the test split are not available until the completion of the 23/24 PolEval challenge. 
| Subset | train | validation | test | | -------------------------- | ------ | ---------- | ----- | | fair-mls-20 | 25 042 | 511 | 519 | | google-fleurs-22 | 2 841 | 338 | 758 | | mailabs-corpus_librivox-19 | 11 834 | 1 527 | 1 501 | | mozilla-common_voice_15-23 | 19 119 | 8 895 | 8 896 | | pjatk-clarin_studio-15 | 10 999 | 1 407 | 1 404 | | pjatk-clarin_mobile-15 | 2 861 | 242 | 392 | | polyai-minds14-21 | 462 | 47 | 53 | | pwr-maleset-unk | 3 783 | 478 | 477 | | pwr-shortwords-unk | 761 | 86 | 92 | | pwr-viu-unk | 2 146 | 290 | 267 | | pwr-azon_read-20 | 1 820 | 382 | 586 | | pwr-azon_spont-20 | 357 | 51 | 48 | ## Dataset Creation ### Curation Rationale The [Polish ASR Speech Data Catalog](https://github.com/goodmike31/pl-asr-speech-data-survey) was used to identify suitable datasets which can be repurposed and included in the BIGOS corpora.<br> The following mandatory criteria were considered: * The dataset must be downloadable. * The license must allow for free, noncommercial use. * Transcriptions must be available and align with the recordings. * The sampling rate of audio recordings must be at least 8 kHz. * Audio must be encoded using a minimum of 16 bits per sample. Recordings which either lacked transcriptions or were too short to be useful for training or evaluation were removed during curation. ### Source Data 12 datasets that met the criteria were chosen as sources for the BIGOS dataset. * The Common Voice dataset version 15 (mozilla-common_voice_15-23) * The Multilingual LibriSpeech (MLS) dataset (fair-mls-20) * The Clarin Studio Corpus (pjatk-clarin_studio-15) * The Clarin Mobile Corpus (pjatk-clarin_mobile-15) * The Jerzy Sas PWR datasets from Politechnika Wrocławska (pwr-viu-unk, pwr-shortwords-unk, pwr-maleset-unk). More info [here](https://www.ii.pwr.edu.pl/) * The Munich-AI Labs Speech corpus (mailabs-corpus_librivox-19) * The AZON Read and Spontaneous Speech Corpora (pwr-azon_spont-20, pwr-azon_read-20). More info [here](https://zasobynauki.pl/zasoby/korpus-nagran-probek-mowy-do-celow-budowy-modeli-akustycznych-dla-automatycznego-rozpoznawania-mowy) * The Google FLEURS dataset (google-fleurs-22) * The PolyAI minds14 dataset (polyai-minds14-21) <br> #### Initial Data Collection and Normalization Source text and audio files were extracted and encoded in a unified format.<br> Dataset-specific transcription norms are preserved, including punctuation and casing. <br> If the original dataset does not provide test, dev, and train splits, the splits were generated pseudorandomly during curation. <br> <br> #### Who are the source language producers? 1. Clarin corpora - Polish-Japanese Academy of Information Technology 2. Common Voice - Mozilla Foundation 3. Multilingual LibriSpeech - Facebook AI Research 4. Jerzy Sas and AZON datasets - Politechnika Wrocławska 5. Google - FLEURS 6. PolyAI London - Minds14 Please refer to the [BIGOS V1 paper](https://annals-csis.org/proceedings/2023/drp/1609.html) for more details. ### Annotations #### Annotation process The current release contains original transcriptions. Manual transcription of subsets and the release of a diagnostic dataset are planned for subsequent releases. #### Who are the annotators? Depends on the source dataset. ### Personal and Sensitive Information This corpus does not contain PII or sensitive information. All speaker IDs are anonymized. ## Considerations for Using the Data ### Social Impact of Dataset To be updated. ### Discussion of Biases To be updated. 
### Other Known Limitations The initial release contains only a subset of recordings from the original datasets. ## Additional Information ### Dataset Curators Original authors of the source datasets - please refer to [source-data](#source-data) for details. Michał Junczyk ([email protected]) - curator of the BIGOS corpora. ### Licensing Information The BIGOS corpora are available under the [Creative Commons By Attribution Share Alike 4.0 license](https://creativecommons.org/licenses/by-sa/4.0/). The original datasets used for the curation of BIGOS have specific terms of usage that must be understood and agreed to before use. Below are the links to the license terms and the datasets the specific license type applies to: * [Creative Commons 0](https://creativecommons.org/share-your-work/public-domain/cc0), which applies to [Common Voice](https://huggingface.co/datasets/mozilla-foundation/common_voice_13_0) * [Creative Commons By Attribution Share Alike 4.0](https://creativecommons.org/licenses/by-sa/4.0/), which applies to [Clarin Cyfry](https://clarin-pl.eu/dspace/handle/11321/317) and the [Azon acoustic speech resources corpus](https://zasobynauki.pl/zasoby/korpus-nagran-probek-mowy-do-celow-budowy-modeli-akustycznych-dla-automatycznego-rozpoznawania-mowy,53293/) * [Creative Commons By Attribution 3.0](https://creativecommons.org/licenses/by/3.0/), which applies to the [CLARIN Mobile database](https://clarin-pl.eu/dspace/handle/11321/237), [CLARIN Studio database](https://clarin-pl.eu/dspace/handle/11321/236), [PELCRA Spelling and Numbers Voice Database](http://pelcra.pl/new/snuv) and [FLEURS dataset](https://huggingface.co/datasets/google/fleurs) * [Creative Commons By Attribution 4.0](https://creativecommons.org/licenses/by/4.0/), which applies to [Multilingual Librispeech](https://huggingface.co/datasets/facebook/multilingual_librispeech) and [Poly AI Minds 14](https://huggingface.co/datasets/PolyAI/minds14) * [Proprietary License of the Munich AI Labs dataset](https://www.caito.de/2019/01/03/the-m-ailabs-speech-dataset) * Public domain mark, which applies to [PWR datasets](https://www.ii.pwr.edu.pl/~sas/ASR/) ### Citation Information Please cite using the [BibTeX](https://dblp.org/rec/conf/fedcsis/Junczyk23.html?view=bibtex) entry. ### Contributions Thanks to [@goodmike31](https://github.com/goodmike31) for adding this dataset.
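The BIGOS card above mentions that the audio and metadata can be consumed through the Hugging Face datasets library but does not include a snippet. The sketch below is a hedged illustration only: it assumes the corpora load directly under the homepage id `amu-cai/pl-asr-bigos-v2` with a default configuration, that the gating terms listed in the metadata have been accepted (e.g. after `huggingface-cli login`), and that the `audio` column is exposed as a decoded array with a sampling rate, as is typical for audio datasets on the Hub.

```python
from datasets import load_dataset

# Minimal sketch: load the validation split and inspect the fields documented
# in the "Data Fields" section (audioname, split, dataset, audio, ref_orig, ...).
# Repo id taken from the card's homepage; accepting the dataset's terms of use
# on the Hub may be required before this call succeeds.
bigos = load_dataset("amu-cai/pl-asr-bigos-v2", split="validation")

sample = bigos[0]
print(sample["audioname"], sample["dataset"])   # file and source-corpus identifiers
print(sample["ref_orig"])                       # original transcription
audio = sample["audio"]                         # assumed: dict with "array" and "sampling_rate"
print(audio["sampling_rate"], len(audio["array"]))
```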
amu-cai/pl-asr-bigos-v2
[ "task_categories:automatic-speech-recognition", "annotations_creators:crowdsourced", "annotations_creators:expert-generated", "annotations_creators:other", "annotations_creators:machine-generated", "language_creators:crowdsourced", "language_creators:expert-generated", "language_creators:other", "multilinguality:monolingual", "size_categories:10K<n<100K", "source_datasets:original", "source_datasets:extended|multilingual_librispeech", "source_datasets:extended|common_voice", "source_datasets:extended|minds14", "source_datasets:extended|fleurs", "language:pl", "license:cc-by-sa-4.0", "benchmark", "polish", "asr", "speech", "dataset", "audio", "region:us" ]
2023-12-16T19:37:45+00:00
{"annotations_creators": ["crowdsourced", "expert-generated", "other", "machine-generated"], "language_creators": ["crowdsourced", "expert-generated", "other"], "language": ["pl"], "license": ["cc-by-sa-4.0"], "multilinguality": ["monolingual"], "size_categories": ["10K<n<100K"], "source_datasets": ["original", "extended|multilingual_librispeech", "extended|common_voice", "extended|minds14", "extended|fleurs"], "task_categories": ["automatic-speech-recognition"], "task_ids": [], "pretty_name": "pl-asr-bigos", "tags": ["benchmark", "polish", "asr", "speech", "dataset", "audio"], "extra_gated_prompt": "Original datasets used for curation of BIGOS have specific terms of usage that must be understood and agreed to before use. Below are the links to the license terms and datasets the specific license type applies to:\n* [Creative Commons 0](https://creativecommons.org/share-your-work/public-domain/cc0) which applies to [Common Voice](https://huggingface.co/datasets/mozilla-foundation/common_voice_13_0)\n* [Creative Commons By Attribution Share Alike 4.0](https://creativecommons.org/licenses/by-sa/4.0/), which applies to [Clarin Cyfry](https://clarin-pl.eu/dspace/handle/11321/317), [Azon acoustic speech resources corpus](https://zasobynauki.pl/zasoby/korpus-nagran-probek-mowy-do-celow-budowy-modeli-akustycznych-dla-automatycznego-rozpoznawania-mowy,53293/).\n* [Creative Commons By Attribution 3.0](https://creativecommons.org/licenses/by/3.0/), which applies to [CLARIN Mobile database](https://clarin-pl.eu/dspace/handle/11321/237), [CLARIN Studio database](https://clarin-pl.eu/dspace/handle/11321/236), [PELCRA Spelling and Numbers Voice Database](http://pelcra.pl/new/snuv) and [FLEURS dataset](https://huggingface.co/datasets/google/fleurs)\n* [Creative Commons By Attribution 4.0](https://creativecommons.org/licenses/by/4.0/), which applies to [Multilingual Librispeech](https://huggingface.co/datasets/facebook/multilingual_librispeech) and [Poly AI Minds 14](https://huggingface.co/datasets/PolyAI/minds14)\n* [Proprietiary License of Munich AI Labs dataset](https://www.caito.de/2019/01/03/the-m-ailabs-speech-dataset)\n* Public domain mark, which applies to [PWR datasets](https://www.ii.pwr.edu.pl/~sas/ASR/)\nTo use selected dataset, you also need to fill in the access forms on the specific datasets pages:\n* Common Voice: https://huggingface.co/datasets/mozilla-foundation/common_voice_13_0", "extra_gated_fields": {"I hereby confirm that I have read and accepted the license terms of datasets comprising BIGOS corpora": "checkbox", "I hereby confirm that I have registered on the original Common Voice page and agree to not attempt to determine the identity of speakers in the Common Voice dataset": "checkbox"}}
2024-02-03T14:49:55+00:00
[]
[ "pl" ]
TAGS #task_categories-automatic-speech-recognition #annotations_creators-crowdsourced #annotations_creators-expert-generated #annotations_creators-other #annotations_creators-machine-generated #language_creators-crowdsourced #language_creators-expert-generated #language_creators-other #multilinguality-monolingual #size_categories-10K<n<100K #source_datasets-original #source_datasets-extended|multilingual_librispeech #source_datasets-extended|common_voice #source_datasets-extended|minds14 #source_datasets-extended|fleurs #language-Polish #license-cc-by-sa-4.0 #benchmark #polish #asr #speech #dataset #audio #region-us
Dataset Card for Polish ASR BIGOS corpora ========================================= Table of Contents ----------------- * Table of Contents * Dataset Description + Dataset Summary + Supported Tasks and Leaderboards + Languages * Dataset Structure + Data Instances + Data Fields + Data Splits * Dataset Creation + Curation Rationale + Source Data + Annotations + Personal and Sensitive Information * Considerations for Using the Data + Social Impact of Dataset + Discussion of Biases + Other Known Limitations * Additional Information + Dataset Curators + Licensing Information + Citation Information + Contributions Dataset Description ------------------- * Homepage: URL * Repository: URL * Paper: URL * Leaderboard: URL * Point of Contact: michal.junczyk@URL ### Dataset Summary The BIGOS (Benchmark Intended Grouping of Open Speech) corpora aims at simplifying the access and use of publicly available ASR speech datasets for Polish. ### Supported Tasks and Leaderboards * Open Polish ASR challenge PolEval using BIGOS V2 and PELCRA for BIGOS datasets * Evaluation of 3 commercial and 5 freely available on BIGOS V1 (paper). Continous benchmark and leaderboard of PL ASR systems using BIGOS corpora is planned for 2024. ### Languages Polish Dataset Structure ----------------- The datasets consist of audio recordings in the WAV format with corresponding metadata. The audio and metadata can be used in a raw format (TSV) or via the Hugging Face datasets library. References for the test split will only become available after the completion of the 23/24 PolEval challenge. ### Data Instances The train set consists of 82 025 samples. The dev set consists of 14 254 samples The test set consists of 14 993 samples. ### Data Fields Available fields: * 'audioname' - file identifier * 'split' - test, validation or train split * 'dataset' - source dataset identifier * 'audio' - binary representation of audio file * 'ref\_orig' - original transcription of audio file * 'samplingrate\_orig' - sampling rate of the original recording * 'sampling\_rate' - sampling rate of recording in the release * 'audiopath\_bigos' - audio filepath after extraction of URL archive ### Data Splits Train split contains recordings intendend for training. Validation split contains recordings for validation during training procedure. Test split contains recordings intended for evaluation only. References for test split are not available until the completion of 23/24 PolEval challenge. Dataset Creation ---------------- ### Curation Rationale Polish ASR Speech Data Catalog was used to identify suitable datasets which can be repurposed and included in the BIGOS corpora. The following mandatory criteria were considered: * Dataset must be downloadable. * The license must allow for free, noncommercial use. * Transcriptions must be available and align with the recordings. * The sampling rate of audio recordings must be at least 8 kHz. * Audio encoding using a minimum of 16 bits per sample. Recordings which either lacked transcriptions or were too short to be useful for training or evaluation were removed during curation. ### Source Data 12 datasets that meet the criteria were chosen as sources for the BIGOS dataset. * The Common Voice dataset version 15 (mozilla-common\_voice\_15-23) * The Multilingual LibriSpeech (MLS) dataset (fair-mls-20) * The Clarin Studio Corpus (pjatk-clarin\_studio-15) * The Clarin Mobile Corpus (pjatk-clarin\_mobile-15) * The Jerzy Sas PWR datasets from Politechnika Wrocławska (pwr-viu-unk, pwr-shortwords-unk, pwr-maleset-unk). 
More info here * The Munich-AI Labs Speech corpus (mailabs-corpus-librivox-19) * The AZON Read and Spontaneous Speech Corpora (pwr-azon\_spont-20, pwr-azon\_read-20) More info here * The Google FLEURS dataset (google-fleurs-22) * The PolyAI minds14 dataset (polyai-minds14-21) #### Initial Data Collection and Normalization Source text and audio files were extracted and encoded in a unified format. Dataset-specific transcription norms are preserved, including punctuation and casing. In case of original dataset does not have test, dev, train splits provided, the splits were generated pseudorandomly during curation. #### Who are the source language producers? 1. Clarin corpora - Polish Japanese Academy of Technology 2. Common Voice - Mozilla foundation 3. Multlingual librispeech - Facebook AI research lab 4. Jerzy Sas and AZON datasets - Politechnika Wrocławska 5. Google - FLEURS 6. PolyAI London - Minds14 Please refer to the BIGOS V1 paper for more details. ### Annotations #### Annotation process Current release contains original transcriptions. Manual transcriptions of subsets and release of diagnostic dataset are planned for subsequent releases. #### Who are the annotators? Depends on the source dataset. ### Personal and Sensitive Information This corpus does not contain PII or Sensitive Information. All IDs pf speakers are anonymized. Considerations for Using the Data --------------------------------- ### Social Impact of Dataset To be updated. ### Discussion of Biases To be updated. ### Other Known Limitations The dataset in the initial release contains only a subset of recordings from original datasets. Additional Information ---------------------- ### Dataset Curators Original authors of the source datasets - please refer to source-data for details. Michał Junczyk (michal.junczyk@URL) - curator of BIGOS corpora. ### Licensing Information The BIGOS corpora is available under Creative Commons By Attribution Share Alike 4.0 license. Original datasets used for curation of BIGOS have specific terms of usage that must be understood and agreed to before use. Below are the links to the license terms and datasets the specific license type applies to: * Creative Commons 0 which applies to Common Voice * Creative Commons By Attribution Share Alike 4.0, which applies to Clarin Cyfry, Azon acoustic speech resources corpus. * Creative Commons By Attribution 3.0, which applies to CLARIN Mobile database, CLARIN Studio database, PELCRA Spelling and Numbers Voice Database and FLEURS dataset * Creative Commons By Attribution 4.0, which applies to Multilingual Librispeech and Poly AI Minds 14 * Proprietiary License of Munich AI Labs dataset * Public domain mark, which applies to PWR datasets Please cite using Bibtex ### Contributions Thanks to @goodmike31 for adding this dataset.
[ "### Dataset Summary\n\n\nThe BIGOS (Benchmark Intended Grouping of Open Speech) corpora aims at simplifying the access and use of publicly available ASR speech datasets for Polish.", "### Supported Tasks and Leaderboards\n\n\n* Open Polish ASR challenge PolEval using BIGOS V2 and PELCRA for BIGOS datasets\n* Evaluation of 3 commercial and 5 freely available on BIGOS V1 (paper).\n\n\nContinous benchmark and leaderboard of PL ASR systems using BIGOS corpora is planned for 2024.", "### Languages\n\n\nPolish\n\n\nDataset Structure\n-----------------\n\n\nThe datasets consist of audio recordings in the WAV format with corresponding metadata. \n\nThe audio and metadata can be used in a raw format (TSV) or via the Hugging Face datasets library. \n\nReferences for the test split will only become available after the completion of the 23/24 PolEval challenge.", "### Data Instances\n\n\nThe train set consists of 82 025 samples.\nThe dev set consists of 14 254 samples\nThe test set consists of 14 993 samples.", "### Data Fields\n\n\nAvailable fields:\n\n\n* 'audioname' - file identifier\n* 'split' - test, validation or train split\n* 'dataset' - source dataset identifier\n* 'audio' - binary representation of audio file\n* 'ref\\_orig' - original transcription of audio file\n* 'samplingrate\\_orig' - sampling rate of the original recording\n* 'sampling\\_rate' - sampling rate of recording in the release\n* 'audiopath\\_bigos' - audio filepath after extraction of URL archive", "### Data Splits\n\n\nTrain split contains recordings intendend for training.\nValidation split contains recordings for validation during training procedure.\nTest split contains recordings intended for evaluation only.\nReferences for test split are not available until the completion of 23/24 PolEval challenge.\n\n\n\nDataset Creation\n----------------", "### Curation Rationale\n\n\nPolish ASR Speech Data Catalog was used to identify suitable datasets which can be repurposed and included in the BIGOS corpora. \n\nThe following mandatory criteria were considered:\n\n\n* Dataset must be downloadable.\n* The license must allow for free, noncommercial use.\n* Transcriptions must be available and align with the recordings.\n* The sampling rate of audio recordings must be at least 8 kHz.\n* Audio encoding using a minimum of 16 bits per sample.\n\n\nRecordings which either lacked transcriptions or were too short to be useful for training or evaluation were removed during curation.", "### Source Data\n\n\n12 datasets that meet the criteria were chosen as sources for the BIGOS dataset.\n\n\n* The Common Voice dataset version 15 (mozilla-common\\_voice\\_15-23)\n* The Multilingual LibriSpeech (MLS) dataset (fair-mls-20)\n* The Clarin Studio Corpus (pjatk-clarin\\_studio-15)\n* The Clarin Mobile Corpus (pjatk-clarin\\_mobile-15)\n* The Jerzy Sas PWR datasets from Politechnika Wrocławska (pwr-viu-unk, pwr-shortwords-unk, pwr-maleset-unk). More info here\n* The Munich-AI Labs Speech corpus (mailabs-corpus-librivox-19)\n* The AZON Read and Spontaneous Speech Corpora (pwr-azon\\_spont-20, pwr-azon\\_read-20) More info here\n* The Google FLEURS dataset (google-fleurs-22)\n* The PolyAI minds14 dataset (polyai-minds14-21)", "#### Initial Data Collection and Normalization\n\n\nSource text and audio files were extracted and encoded in a unified format. \n\nDataset-specific transcription norms are preserved, including punctuation and casing. 
\n\nIn case of original dataset does not have test, dev, train splits provided, the splits were generated pseudorandomly during curation.", "#### Who are the source language producers?\n\n\n1. Clarin corpora - Polish Japanese Academy of Technology\n2. Common Voice - Mozilla foundation\n3. Multlingual librispeech - Facebook AI research lab\n4. Jerzy Sas and AZON datasets - Politechnika Wrocławska\n5. Google - FLEURS\n6. PolyAI London - Minds14\n\n\nPlease refer to the BIGOS V1 paper for more details.", "### Annotations", "#### Annotation process\n\n\nCurrent release contains original transcriptions.\nManual transcriptions of subsets and release of diagnostic dataset are planned for subsequent releases.", "#### Who are the annotators?\n\n\nDepends on the source dataset.", "### Personal and Sensitive Information\n\n\nThis corpus does not contain PII or Sensitive Information.\nAll IDs pf speakers are anonymized.\n\n\nConsiderations for Using the Data\n---------------------------------", "### Social Impact of Dataset\n\n\nTo be updated.", "### Discussion of Biases\n\n\nTo be updated.", "### Other Known Limitations\n\n\nThe dataset in the initial release contains only a subset of recordings from original datasets.\n\n\nAdditional Information\n----------------------", "### Dataset Curators\n\n\nOriginal authors of the source datasets - please refer to source-data for details.\n\n\nMichał Junczyk (michal.junczyk@URL) - curator of BIGOS corpora.", "### Licensing Information\n\n\nThe BIGOS corpora is available under Creative Commons By Attribution Share Alike 4.0 license.\n\n\nOriginal datasets used for curation of BIGOS have specific terms of usage that must be understood and agreed to before use. Below are the links to the license terms and datasets the specific license type applies to:\n\n\n* Creative Commons 0 which applies to Common Voice\n* Creative Commons By Attribution Share Alike 4.0, which applies to Clarin Cyfry, Azon acoustic speech resources corpus.\n* Creative Commons By Attribution 3.0, which applies to CLARIN Mobile database, CLARIN Studio database, PELCRA Spelling and Numbers Voice Database and FLEURS dataset\n* Creative Commons By Attribution 4.0, which applies to Multilingual Librispeech and Poly AI Minds 14\n* Proprietiary License of Munich AI Labs dataset\n* Public domain mark, which applies to PWR datasets\n\n\nPlease cite using Bibtex", "### Contributions\n\n\nThanks to @goodmike31 for adding this dataset." ]
[ "TAGS\n#task_categories-automatic-speech-recognition #annotations_creators-crowdsourced #annotations_creators-expert-generated #annotations_creators-other #annotations_creators-machine-generated #language_creators-crowdsourced #language_creators-expert-generated #language_creators-other #multilinguality-monolingual #size_categories-10K<n<100K #source_datasets-original #source_datasets-extended|multilingual_librispeech #source_datasets-extended|common_voice #source_datasets-extended|minds14 #source_datasets-extended|fleurs #language-Polish #license-cc-by-sa-4.0 #benchmark #polish #asr #speech #dataset #audio #region-us \n", "### Dataset Summary\n\n\nThe BIGOS (Benchmark Intended Grouping of Open Speech) corpora aims at simplifying the access and use of publicly available ASR speech datasets for Polish.", "### Supported Tasks and Leaderboards\n\n\n* Open Polish ASR challenge PolEval using BIGOS V2 and PELCRA for BIGOS datasets\n* Evaluation of 3 commercial and 5 freely available on BIGOS V1 (paper).\n\n\nContinous benchmark and leaderboard of PL ASR systems using BIGOS corpora is planned for 2024.", "### Languages\n\n\nPolish\n\n\nDataset Structure\n-----------------\n\n\nThe datasets consist of audio recordings in the WAV format with corresponding metadata. \n\nThe audio and metadata can be used in a raw format (TSV) or via the Hugging Face datasets library. \n\nReferences for the test split will only become available after the completion of the 23/24 PolEval challenge.", "### Data Instances\n\n\nThe train set consists of 82 025 samples.\nThe dev set consists of 14 254 samples\nThe test set consists of 14 993 samples.", "### Data Fields\n\n\nAvailable fields:\n\n\n* 'audioname' - file identifier\n* 'split' - test, validation or train split\n* 'dataset' - source dataset identifier\n* 'audio' - binary representation of audio file\n* 'ref\\_orig' - original transcription of audio file\n* 'samplingrate\\_orig' - sampling rate of the original recording\n* 'sampling\\_rate' - sampling rate of recording in the release\n* 'audiopath\\_bigos' - audio filepath after extraction of URL archive", "### Data Splits\n\n\nTrain split contains recordings intendend for training.\nValidation split contains recordings for validation during training procedure.\nTest split contains recordings intended for evaluation only.\nReferences for test split are not available until the completion of 23/24 PolEval challenge.\n\n\n\nDataset Creation\n----------------", "### Curation Rationale\n\n\nPolish ASR Speech Data Catalog was used to identify suitable datasets which can be repurposed and included in the BIGOS corpora. 
\n\nThe following mandatory criteria were considered:\n\n\n* Dataset must be downloadable.\n* The license must allow for free, noncommercial use.\n* Transcriptions must be available and align with the recordings.\n* The sampling rate of audio recordings must be at least 8 kHz.\n* Audio encoding using a minimum of 16 bits per sample.\n\n\nRecordings which either lacked transcriptions or were too short to be useful for training or evaluation were removed during curation.", "### Source Data\n\n\n12 datasets that meet the criteria were chosen as sources for the BIGOS dataset.\n\n\n* The Common Voice dataset version 15 (mozilla-common\\_voice\\_15-23)\n* The Multilingual LibriSpeech (MLS) dataset (fair-mls-20)\n* The Clarin Studio Corpus (pjatk-clarin\\_studio-15)\n* The Clarin Mobile Corpus (pjatk-clarin\\_mobile-15)\n* The Jerzy Sas PWR datasets from Politechnika Wrocławska (pwr-viu-unk, pwr-shortwords-unk, pwr-maleset-unk). More info here\n* The Munich-AI Labs Speech corpus (mailabs-corpus-librivox-19)\n* The AZON Read and Spontaneous Speech Corpora (pwr-azon\\_spont-20, pwr-azon\\_read-20) More info here\n* The Google FLEURS dataset (google-fleurs-22)\n* The PolyAI minds14 dataset (polyai-minds14-21)", "#### Initial Data Collection and Normalization\n\n\nSource text and audio files were extracted and encoded in a unified format. \n\nDataset-specific transcription norms are preserved, including punctuation and casing. \n\nIn case of original dataset does not have test, dev, train splits provided, the splits were generated pseudorandomly during curation.", "#### Who are the source language producers?\n\n\n1. Clarin corpora - Polish Japanese Academy of Technology\n2. Common Voice - Mozilla foundation\n3. Multlingual librispeech - Facebook AI research lab\n4. Jerzy Sas and AZON datasets - Politechnika Wrocławska\n5. Google - FLEURS\n6. PolyAI London - Minds14\n\n\nPlease refer to the BIGOS V1 paper for more details.", "### Annotations", "#### Annotation process\n\n\nCurrent release contains original transcriptions.\nManual transcriptions of subsets and release of diagnostic dataset are planned for subsequent releases.", "#### Who are the annotators?\n\n\nDepends on the source dataset.", "### Personal and Sensitive Information\n\n\nThis corpus does not contain PII or Sensitive Information.\nAll IDs pf speakers are anonymized.\n\n\nConsiderations for Using the Data\n---------------------------------", "### Social Impact of Dataset\n\n\nTo be updated.", "### Discussion of Biases\n\n\nTo be updated.", "### Other Known Limitations\n\n\nThe dataset in the initial release contains only a subset of recordings from original datasets.\n\n\nAdditional Information\n----------------------", "### Dataset Curators\n\n\nOriginal authors of the source datasets - please refer to source-data for details.\n\n\nMichał Junczyk (michal.junczyk@URL) - curator of BIGOS corpora.", "### Licensing Information\n\n\nThe BIGOS corpora is available under Creative Commons By Attribution Share Alike 4.0 license.\n\n\nOriginal datasets used for curation of BIGOS have specific terms of usage that must be understood and agreed to before use. 
Below are the links to the license terms and datasets the specific license type applies to:\n\n\n* Creative Commons 0 which applies to Common Voice\n* Creative Commons By Attribution Share Alike 4.0, which applies to Clarin Cyfry, Azon acoustic speech resources corpus.\n* Creative Commons By Attribution 3.0, which applies to CLARIN Mobile database, CLARIN Studio database, PELCRA Spelling and Numbers Voice Database and FLEURS dataset\n* Creative Commons By Attribution 4.0, which applies to Multilingual Librispeech and Poly AI Minds 14\n* Proprietiary License of Munich AI Labs dataset\n* Public domain mark, which applies to PWR datasets\n\n\nPlease cite using Bibtex", "### Contributions\n\n\nThanks to @goodmike31 for adding this dataset." ]
[ 225, 46, 74, 84, 40, 133, 68, 141, 242, 81, 82, 5, 35, 18, 42, 11, 12, 36, 45, 201, 18 ]
[ "passage: TAGS\n#task_categories-automatic-speech-recognition #annotations_creators-crowdsourced #annotations_creators-expert-generated #annotations_creators-other #annotations_creators-machine-generated #language_creators-crowdsourced #language_creators-expert-generated #language_creators-other #multilinguality-monolingual #size_categories-10K<n<100K #source_datasets-original #source_datasets-extended|multilingual_librispeech #source_datasets-extended|common_voice #source_datasets-extended|minds14 #source_datasets-extended|fleurs #language-Polish #license-cc-by-sa-4.0 #benchmark #polish #asr #speech #dataset #audio #region-us \n### Dataset Summary\n\n\nThe BIGOS (Benchmark Intended Grouping of Open Speech) corpora aims at simplifying the access and use of publicly available ASR speech datasets for Polish.### Supported Tasks and Leaderboards\n\n\n* Open Polish ASR challenge PolEval using BIGOS V2 and PELCRA for BIGOS datasets\n* Evaluation of 3 commercial and 5 freely available on BIGOS V1 (paper).\n\n\nContinous benchmark and leaderboard of PL ASR systems using BIGOS corpora is planned for 2024.### Languages\n\n\nPolish\n\n\nDataset Structure\n-----------------\n\n\nThe datasets consist of audio recordings in the WAV format with corresponding metadata. \n\nThe audio and metadata can be used in a raw format (TSV) or via the Hugging Face datasets library. \n\nReferences for the test split will only become available after the completion of the 23/24 PolEval challenge.### Data Instances\n\n\nThe train set consists of 82 025 samples.\nThe dev set consists of 14 254 samples\nThe test set consists of 14 993 samples.", "passage: ### Data Fields\n\n\nAvailable fields:\n\n\n* 'audioname' - file identifier\n* 'split' - test, validation or train split\n* 'dataset' - source dataset identifier\n* 'audio' - binary representation of audio file\n* 'ref\\_orig' - original transcription of audio file\n* 'samplingrate\\_orig' - sampling rate of the original recording\n* 'sampling\\_rate' - sampling rate of recording in the release\n* 'audiopath\\_bigos' - audio filepath after extraction of URL archive### Data Splits\n\n\nTrain split contains recordings intendend for training.\nValidation split contains recordings for validation during training procedure.\nTest split contains recordings intended for evaluation only.\nReferences for test split are not available until the completion of 23/24 PolEval challenge.\n\n\n\nDataset Creation\n----------------### Curation Rationale\n\n\nPolish ASR Speech Data Catalog was used to identify suitable datasets which can be repurposed and included in the BIGOS corpora. 
\n\nThe following mandatory criteria were considered:\n\n\n* Dataset must be downloadable.\n* The license must allow for free, noncommercial use.\n* Transcriptions must be available and align with the recordings.\n* The sampling rate of audio recordings must be at least 8 kHz.\n* Audio encoding using a minimum of 16 bits per sample.\n\n\nRecordings which either lacked transcriptions or were too short to be useful for training or evaluation were removed during curation.### Source Data\n\n\n12 datasets that meet the criteria were chosen as sources for the BIGOS dataset.\n\n\n* The Common Voice dataset version 15 (mozilla-common\\_voice\\_15-23)\n* The Multilingual LibriSpeech (MLS) dataset (fair-mls-20)\n* The Clarin Studio Corpus (pjatk-clarin\\_studio-15)\n* The Clarin Mobile Corpus (pjatk-clarin\\_mobile-15)\n* The Jerzy Sas PWR datasets from Politechnika Wrocławska (pwr-viu-unk, pwr-shortwords-unk, pwr-maleset-unk). More info here\n* The Munich-AI Labs Speech corpus (mailabs-corpus-librivox-19)\n* The AZON Read and Spontaneous Speech Corpora (pwr-azon\\_spont-20, pwr-azon\\_read-20) More info here\n* The Google FLEURS dataset (google-fleurs-22)\n* The PolyAI minds14 dataset (polyai-minds14-21)" ]
4be72741626d6ed96c65995783290ac72e30daf6
# Dataset Card for Evaluation run of mncai/mistral-7b-dpo-v6 <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [mncai/mistral-7b-dpo-v6](https://huggingface.co/mncai/mistral-7b-dpo-v6) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_mncai__mistral-7b-dpo-v6", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-12-16T20:12:40.545634](https://huggingface.co/datasets/open-llm-leaderboard/details_mncai__mistral-7b-dpo-v6/blob/main/results_2023-12-16T20-12-40.545634.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.6525100354141533, "acc_stderr": 0.032048485515607635, "acc_norm": 0.6521274074305052, "acc_norm_stderr": 0.03271226334016835, "mc1": 0.5483476132190942, "mc1_stderr": 0.01742148030027764, "mc2": 0.6823900041375971, "mc2_stderr": 0.015243369336298 }, "harness|arc:challenge|25": { "acc": 0.6919795221843004, "acc_stderr": 0.013491429517292038, "acc_norm": 0.7252559726962458, "acc_norm_stderr": 0.013044617212771227 }, "harness|hellaswag|10": { "acc": 0.7074287990440151, "acc_stderr": 0.004540134005060321, "acc_norm": 0.8809998008364868, "acc_norm_stderr": 0.003231270127834668 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.33, "acc_stderr": 0.04725815626252605, "acc_norm": 0.33, "acc_norm_stderr": 0.04725815626252605 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.6666666666666666, "acc_stderr": 0.04072314811876837, "acc_norm": 0.6666666666666666, "acc_norm_stderr": 0.04072314811876837 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.6973684210526315, "acc_stderr": 0.037385206761196686, "acc_norm": 0.6973684210526315, "acc_norm_stderr": 0.037385206761196686 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.6, "acc_stderr": 0.04923659639173309, "acc_norm": 0.6, "acc_norm_stderr": 0.04923659639173309 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.7245283018867924, "acc_stderr": 0.027495663683724053, "acc_norm": 0.7245283018867924, "acc_norm_stderr": 0.027495663683724053 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.7708333333333334, "acc_stderr": 0.03514697467862388, "acc_norm": 0.7708333333333334, "acc_norm_stderr": 0.03514697467862388 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.45, "acc_stderr": 0.05, "acc_norm": 0.45, "acc_norm_stderr": 0.05 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.55, "acc_stderr": 0.05, "acc_norm": 0.55, "acc_norm_stderr": 0.05 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, 
"acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.6763005780346821, "acc_stderr": 0.035676037996391706, "acc_norm": 0.6763005780346821, "acc_norm_stderr": 0.035676037996391706 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.4411764705882353, "acc_stderr": 0.049406356306056595, "acc_norm": 0.4411764705882353, "acc_norm_stderr": 0.049406356306056595 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.78, "acc_stderr": 0.04163331998932263, "acc_norm": 0.78, "acc_norm_stderr": 0.04163331998932263 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.5574468085106383, "acc_stderr": 0.03246956919789958, "acc_norm": 0.5574468085106383, "acc_norm_stderr": 0.03246956919789958 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.5087719298245614, "acc_stderr": 0.04702880432049615, "acc_norm": 0.5087719298245614, "acc_norm_stderr": 0.04702880432049615 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5655172413793104, "acc_stderr": 0.04130740879555498, "acc_norm": 0.5655172413793104, "acc_norm_stderr": 0.04130740879555498 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.42328042328042326, "acc_stderr": 0.025446365634406783, "acc_norm": 0.42328042328042326, "acc_norm_stderr": 0.025446365634406783 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.4603174603174603, "acc_stderr": 0.04458029125470973, "acc_norm": 0.4603174603174603, "acc_norm_stderr": 0.04458029125470973 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.37, "acc_stderr": 0.048523658709391, "acc_norm": 0.37, "acc_norm_stderr": 0.048523658709391 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.7838709677419354, "acc_stderr": 0.02341529343356853, "acc_norm": 0.7838709677419354, "acc_norm_stderr": 0.02341529343356853 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.5221674876847291, "acc_stderr": 0.03514528562175007, "acc_norm": 0.5221674876847291, "acc_norm_stderr": 0.03514528562175007 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.71, "acc_stderr": 0.045604802157206845, "acc_norm": 0.71, "acc_norm_stderr": 0.045604802157206845 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.7757575757575758, "acc_stderr": 0.03256866661681102, "acc_norm": 0.7757575757575758, "acc_norm_stderr": 0.03256866661681102 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.7878787878787878, "acc_stderr": 0.029126522834586818, "acc_norm": 0.7878787878787878, "acc_norm_stderr": 0.029126522834586818 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.8911917098445595, "acc_stderr": 0.022473253332768766, "acc_norm": 0.8911917098445595, "acc_norm_stderr": 0.022473253332768766 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.6615384615384615, "acc_stderr": 0.023991500500313036, "acc_norm": 0.6615384615384615, "acc_norm_stderr": 0.023991500500313036 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.31851851851851853, "acc_stderr": 0.028406533090608456, "acc_norm": 0.31851851851851853, "acc_norm_stderr": 0.028406533090608456 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.6470588235294118, "acc_stderr": 0.031041941304059278, "acc_norm": 0.6470588235294118, "acc_norm_stderr": 0.031041941304059278 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.31125827814569534, "acc_stderr": 0.03780445850526732, "acc_norm": 0.31125827814569534, "acc_norm_stderr": 0.03780445850526732 }, 
"harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8477064220183487, "acc_stderr": 0.015405084393157074, "acc_norm": 0.8477064220183487, "acc_norm_stderr": 0.015405084393157074 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.5185185185185185, "acc_stderr": 0.034076320938540516, "acc_norm": 0.5185185185185185, "acc_norm_stderr": 0.034076320938540516 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.8284313725490197, "acc_stderr": 0.026460569561240644, "acc_norm": 0.8284313725490197, "acc_norm_stderr": 0.026460569561240644 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.7848101265822784, "acc_stderr": 0.02675082699467617, "acc_norm": 0.7848101265822784, "acc_norm_stderr": 0.02675082699467617 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.6860986547085202, "acc_stderr": 0.031146796482972465, "acc_norm": 0.6860986547085202, "acc_norm_stderr": 0.031146796482972465 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.816793893129771, "acc_stderr": 0.03392770926494733, "acc_norm": 0.816793893129771, "acc_norm_stderr": 0.03392770926494733 }, "harness|hendrycksTest-international_law|5": { "acc": 0.8016528925619835, "acc_stderr": 0.03640118271990947, "acc_norm": 0.8016528925619835, "acc_norm_stderr": 0.03640118271990947 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.7592592592592593, "acc_stderr": 0.04133119440243839, "acc_norm": 0.7592592592592593, "acc_norm_stderr": 0.04133119440243839 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.7730061349693251, "acc_stderr": 0.03291099578615769, "acc_norm": 0.7730061349693251, "acc_norm_stderr": 0.03291099578615769 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.41964285714285715, "acc_stderr": 0.04684099321077106, "acc_norm": 0.41964285714285715, "acc_norm_stderr": 0.04684099321077106 }, "harness|hendrycksTest-management|5": { "acc": 0.7766990291262136, "acc_stderr": 0.04123553189891431, "acc_norm": 0.7766990291262136, "acc_norm_stderr": 0.04123553189891431 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8760683760683761, "acc_stderr": 0.021586494001281372, "acc_norm": 0.8760683760683761, "acc_norm_stderr": 0.021586494001281372 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.69, "acc_stderr": 0.04648231987117316, "acc_norm": 0.69, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.8275862068965517, "acc_stderr": 0.013507943909371802, "acc_norm": 0.8275862068965517, "acc_norm_stderr": 0.013507943909371802 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.7398843930635838, "acc_stderr": 0.023618678310069356, "acc_norm": 0.7398843930635838, "acc_norm_stderr": 0.023618678310069356 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.4782122905027933, "acc_stderr": 0.016706617522176132, "acc_norm": 0.4782122905027933, "acc_norm_stderr": 0.016706617522176132 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.7222222222222222, "acc_stderr": 0.025646863097137894, "acc_norm": 0.7222222222222222, "acc_norm_stderr": 0.025646863097137894 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.7170418006430869, "acc_stderr": 0.02558306248998481, "acc_norm": 0.7170418006430869, "acc_norm_stderr": 0.02558306248998481 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.7407407407407407, "acc_stderr": 0.02438366553103545, "acc_norm": 0.7407407407407407, "acc_norm_stderr": 0.02438366553103545 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.4716312056737589, "acc_stderr": 0.02977945095730307, 
"acc_norm": 0.4716312056737589, "acc_norm_stderr": 0.02977945095730307 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.46870925684485004, "acc_stderr": 0.012745204626083136, "acc_norm": 0.46870925684485004, "acc_norm_stderr": 0.012745204626083136 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.6875, "acc_stderr": 0.02815637344037142, "acc_norm": 0.6875, "acc_norm_stderr": 0.02815637344037142 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.6683006535947712, "acc_stderr": 0.01904748523936038, "acc_norm": 0.6683006535947712, "acc_norm_stderr": 0.01904748523936038 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6818181818181818, "acc_stderr": 0.044612721759105085, "acc_norm": 0.6818181818181818, "acc_norm_stderr": 0.044612721759105085 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.7387755102040816, "acc_stderr": 0.028123429335142773, "acc_norm": 0.7387755102040816, "acc_norm_stderr": 0.028123429335142773 }, "harness|hendrycksTest-sociology|5": { "acc": 0.835820895522388, "acc_stderr": 0.026193923544454115, "acc_norm": 0.835820895522388, "acc_norm_stderr": 0.026193923544454115 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.87, "acc_stderr": 0.03379976689896309, "acc_norm": 0.87, "acc_norm_stderr": 0.03379976689896309 }, "harness|hendrycksTest-virology|5": { "acc": 0.5421686746987951, "acc_stderr": 0.038786267710023595, "acc_norm": 0.5421686746987951, "acc_norm_stderr": 0.038786267710023595 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8187134502923976, "acc_stderr": 0.029547741687640044, "acc_norm": 0.8187134502923976, "acc_norm_stderr": 0.029547741687640044 }, "harness|truthfulqa:mc|0": { "mc1": 0.5483476132190942, "mc1_stderr": 0.01742148030027764, "mc2": 0.6823900041375971, "mc2_stderr": 0.015243369336298 }, "harness|winogrande|5": { "acc": 0.8255722178374112, "acc_stderr": 0.010665187902498435 }, "harness|gsm8k|5": { "acc": 0.7088703563305534, "acc_stderr": 0.012513215297888463 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). 
--> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
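For the aggregated metrics, a similar call can presumably be used with the "results" configuration described at the top of this card; the sketch below assumes that configuration loads the same way as the per-task configurations and that the "train" split points to the latest run, as stated above.

```python
from datasets import load_dataset

# Sketch: read the aggregated "results" configuration (assumption: it loads
# like the per-task configurations; the "train" split points to the latest run).
results = load_dataset(
    "open-llm-leaderboard/details_mncai__mistral-7b-dpo-v6",
    "results",
    split="train",
)
print(results[0])  # aggregated metrics of the latest evaluation run
```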
open-llm-leaderboard/details_mncai__mistral-7b-dpo-v6
[ "region:us" ]
2023-12-16T20:15:31+00:00
{"pretty_name": "Evaluation run of mncai/mistral-7b-dpo-v6", "dataset_summary": "Dataset automatically created during the evaluation run of model [mncai/mistral-7b-dpo-v6](https://huggingface.co/mncai/mistral-7b-dpo-v6) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_mncai__mistral-7b-dpo-v6\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-12-16T20:12:40.545634](https://huggingface.co/datasets/open-llm-leaderboard/details_mncai__mistral-7b-dpo-v6/blob/main/results_2023-12-16T20-12-40.545634.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6525100354141533,\n \"acc_stderr\": 0.032048485515607635,\n \"acc_norm\": 0.6521274074305052,\n \"acc_norm_stderr\": 0.03271226334016835,\n \"mc1\": 0.5483476132190942,\n \"mc1_stderr\": 0.01742148030027764,\n \"mc2\": 0.6823900041375971,\n \"mc2_stderr\": 0.015243369336298\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.6919795221843004,\n \"acc_stderr\": 0.013491429517292038,\n \"acc_norm\": 0.7252559726962458,\n \"acc_norm_stderr\": 0.013044617212771227\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.7074287990440151,\n \"acc_stderr\": 0.004540134005060321,\n \"acc_norm\": 0.8809998008364868,\n \"acc_norm_stderr\": 0.003231270127834668\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.33,\n \"acc_stderr\": 0.04725815626252605,\n \"acc_norm\": 0.33,\n \"acc_norm_stderr\": 0.04725815626252605\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6666666666666666,\n \"acc_stderr\": 0.04072314811876837,\n \"acc_norm\": 0.6666666666666666,\n \"acc_norm_stderr\": 0.04072314811876837\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.6973684210526315,\n \"acc_stderr\": 0.037385206761196686,\n \"acc_norm\": 0.6973684210526315,\n \"acc_norm_stderr\": 0.037385206761196686\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.6,\n \"acc_stderr\": 0.04923659639173309,\n \"acc_norm\": 0.6,\n \"acc_norm_stderr\": 0.04923659639173309\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.7245283018867924,\n \"acc_stderr\": 0.027495663683724053,\n \"acc_norm\": 0.7245283018867924,\n \"acc_norm_stderr\": 0.027495663683724053\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7708333333333334,\n \"acc_stderr\": 0.03514697467862388,\n \"acc_norm\": 0.7708333333333334,\n \"acc_norm_stderr\": 0.03514697467862388\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.45,\n \"acc_stderr\": 0.05,\n \"acc_norm\": 0.45,\n \"acc_norm_stderr\": 0.05\n },\n 
\"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.55,\n \"acc_stderr\": 0.05,\n \"acc_norm\": 0.55,\n \"acc_norm_stderr\": 0.05\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.3,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.3,\n \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6763005780346821,\n \"acc_stderr\": 0.035676037996391706,\n \"acc_norm\": 0.6763005780346821,\n \"acc_norm_stderr\": 0.035676037996391706\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.4411764705882353,\n \"acc_stderr\": 0.049406356306056595,\n \"acc_norm\": 0.4411764705882353,\n \"acc_norm_stderr\": 0.049406356306056595\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.78,\n \"acc_stderr\": 0.04163331998932263,\n \"acc_norm\": 0.78,\n \"acc_norm_stderr\": 0.04163331998932263\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.5574468085106383,\n \"acc_stderr\": 0.03246956919789958,\n \"acc_norm\": 0.5574468085106383,\n \"acc_norm_stderr\": 0.03246956919789958\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.5087719298245614,\n \"acc_stderr\": 0.04702880432049615,\n \"acc_norm\": 0.5087719298245614,\n \"acc_norm_stderr\": 0.04702880432049615\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5655172413793104,\n \"acc_stderr\": 0.04130740879555498,\n \"acc_norm\": 0.5655172413793104,\n \"acc_norm_stderr\": 0.04130740879555498\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.42328042328042326,\n \"acc_stderr\": 0.025446365634406783,\n \"acc_norm\": 0.42328042328042326,\n \"acc_norm_stderr\": 0.025446365634406783\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.4603174603174603,\n \"acc_stderr\": 0.04458029125470973,\n \"acc_norm\": 0.4603174603174603,\n \"acc_norm_stderr\": 0.04458029125470973\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.37,\n \"acc_stderr\": 0.048523658709391,\n \"acc_norm\": 0.37,\n \"acc_norm_stderr\": 0.048523658709391\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7838709677419354,\n \"acc_stderr\": 0.02341529343356853,\n \"acc_norm\": 0.7838709677419354,\n \"acc_norm_stderr\": 0.02341529343356853\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.5221674876847291,\n \"acc_stderr\": 0.03514528562175007,\n \"acc_norm\": 0.5221674876847291,\n \"acc_norm_stderr\": 0.03514528562175007\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.71,\n \"acc_stderr\": 0.045604802157206845,\n \"acc_norm\": 0.71,\n \"acc_norm_stderr\": 0.045604802157206845\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7757575757575758,\n \"acc_stderr\": 0.03256866661681102,\n \"acc_norm\": 0.7757575757575758,\n \"acc_norm_stderr\": 0.03256866661681102\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.7878787878787878,\n \"acc_stderr\": 0.029126522834586818,\n \"acc_norm\": 0.7878787878787878,\n \"acc_norm_stderr\": 0.029126522834586818\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.8911917098445595,\n \"acc_stderr\": 0.022473253332768766,\n \"acc_norm\": 0.8911917098445595,\n \"acc_norm_stderr\": 0.022473253332768766\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.6615384615384615,\n \"acc_stderr\": 0.023991500500313036,\n \"acc_norm\": 0.6615384615384615,\n 
\"acc_norm_stderr\": 0.023991500500313036\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.31851851851851853,\n \"acc_stderr\": 0.028406533090608456,\n \"acc_norm\": 0.31851851851851853,\n \"acc_norm_stderr\": 0.028406533090608456\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.6470588235294118,\n \"acc_stderr\": 0.031041941304059278,\n \"acc_norm\": 0.6470588235294118,\n \"acc_norm_stderr\": 0.031041941304059278\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.31125827814569534,\n \"acc_stderr\": 0.03780445850526732,\n \"acc_norm\": 0.31125827814569534,\n \"acc_norm_stderr\": 0.03780445850526732\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8477064220183487,\n \"acc_stderr\": 0.015405084393157074,\n \"acc_norm\": 0.8477064220183487,\n \"acc_norm_stderr\": 0.015405084393157074\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.5185185185185185,\n \"acc_stderr\": 0.034076320938540516,\n \"acc_norm\": 0.5185185185185185,\n \"acc_norm_stderr\": 0.034076320938540516\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.8284313725490197,\n \"acc_stderr\": 0.026460569561240644,\n \"acc_norm\": 0.8284313725490197,\n \"acc_norm_stderr\": 0.026460569561240644\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.7848101265822784,\n \"acc_stderr\": 0.02675082699467617,\n \"acc_norm\": 0.7848101265822784,\n \"acc_norm_stderr\": 0.02675082699467617\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6860986547085202,\n \"acc_stderr\": 0.031146796482972465,\n \"acc_norm\": 0.6860986547085202,\n \"acc_norm_stderr\": 0.031146796482972465\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.816793893129771,\n \"acc_stderr\": 0.03392770926494733,\n \"acc_norm\": 0.816793893129771,\n \"acc_norm_stderr\": 0.03392770926494733\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.8016528925619835,\n \"acc_stderr\": 0.03640118271990947,\n \"acc_norm\": 0.8016528925619835,\n \"acc_norm_stderr\": 0.03640118271990947\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7592592592592593,\n \"acc_stderr\": 0.04133119440243839,\n \"acc_norm\": 0.7592592592592593,\n \"acc_norm_stderr\": 0.04133119440243839\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7730061349693251,\n \"acc_stderr\": 0.03291099578615769,\n \"acc_norm\": 0.7730061349693251,\n \"acc_norm_stderr\": 0.03291099578615769\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.41964285714285715,\n \"acc_stderr\": 0.04684099321077106,\n \"acc_norm\": 0.41964285714285715,\n \"acc_norm_stderr\": 0.04684099321077106\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7766990291262136,\n \"acc_stderr\": 0.04123553189891431,\n \"acc_norm\": 0.7766990291262136,\n \"acc_norm_stderr\": 0.04123553189891431\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8760683760683761,\n \"acc_stderr\": 0.021586494001281372,\n \"acc_norm\": 0.8760683760683761,\n \"acc_norm_stderr\": 0.021586494001281372\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.69,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.69,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.8275862068965517,\n \"acc_stderr\": 0.013507943909371802,\n \"acc_norm\": 0.8275862068965517,\n \"acc_norm_stderr\": 0.013507943909371802\n },\n 
\"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.7398843930635838,\n \"acc_stderr\": 0.023618678310069356,\n \"acc_norm\": 0.7398843930635838,\n \"acc_norm_stderr\": 0.023618678310069356\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.4782122905027933,\n \"acc_stderr\": 0.016706617522176132,\n \"acc_norm\": 0.4782122905027933,\n \"acc_norm_stderr\": 0.016706617522176132\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.7222222222222222,\n \"acc_stderr\": 0.025646863097137894,\n \"acc_norm\": 0.7222222222222222,\n \"acc_norm_stderr\": 0.025646863097137894\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.7170418006430869,\n \"acc_stderr\": 0.02558306248998481,\n \"acc_norm\": 0.7170418006430869,\n \"acc_norm_stderr\": 0.02558306248998481\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.7407407407407407,\n \"acc_stderr\": 0.02438366553103545,\n \"acc_norm\": 0.7407407407407407,\n \"acc_norm_stderr\": 0.02438366553103545\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.4716312056737589,\n \"acc_stderr\": 0.02977945095730307,\n \"acc_norm\": 0.4716312056737589,\n \"acc_norm_stderr\": 0.02977945095730307\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.46870925684485004,\n \"acc_stderr\": 0.012745204626083136,\n \"acc_norm\": 0.46870925684485004,\n \"acc_norm_stderr\": 0.012745204626083136\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.6875,\n \"acc_stderr\": 0.02815637344037142,\n \"acc_norm\": 0.6875,\n \"acc_norm_stderr\": 0.02815637344037142\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6683006535947712,\n \"acc_stderr\": 0.01904748523936038,\n \"acc_norm\": 0.6683006535947712,\n \"acc_norm_stderr\": 0.01904748523936038\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6818181818181818,\n \"acc_stderr\": 0.044612721759105085,\n \"acc_norm\": 0.6818181818181818,\n \"acc_norm_stderr\": 0.044612721759105085\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.7387755102040816,\n \"acc_stderr\": 0.028123429335142773,\n \"acc_norm\": 0.7387755102040816,\n \"acc_norm_stderr\": 0.028123429335142773\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.835820895522388,\n \"acc_stderr\": 0.026193923544454115,\n \"acc_norm\": 0.835820895522388,\n \"acc_norm_stderr\": 0.026193923544454115\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.87,\n \"acc_stderr\": 0.03379976689896309,\n \"acc_norm\": 0.87,\n \"acc_norm_stderr\": 0.03379976689896309\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5421686746987951,\n \"acc_stderr\": 0.038786267710023595,\n \"acc_norm\": 0.5421686746987951,\n \"acc_norm_stderr\": 0.038786267710023595\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8187134502923976,\n \"acc_stderr\": 0.029547741687640044,\n \"acc_norm\": 0.8187134502923976,\n \"acc_norm_stderr\": 0.029547741687640044\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.5483476132190942,\n \"mc1_stderr\": 0.01742148030027764,\n \"mc2\": 0.6823900041375971,\n \"mc2_stderr\": 0.015243369336298\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.8255722178374112,\n \"acc_stderr\": 0.010665187902498435\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.7088703563305534,\n \"acc_stderr\": 0.012513215297888463\n }\n}\n```", "repo_url": "https://huggingface.co/mncai/mistral-7b-dpo-v6", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", 
"point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_12_16T20_12_40.545634", "path": ["**/details_harness|arc:challenge|25_2023-12-16T20-12-40.545634.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-12-16T20-12-40.545634.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_12_16T20_12_40.545634", "path": ["**/details_harness|gsm8k|5_2023-12-16T20-12-40.545634.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-12-16T20-12-40.545634.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_12_16T20_12_40.545634", "path": ["**/details_harness|hellaswag|10_2023-12-16T20-12-40.545634.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-12-16T20-12-40.545634.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_12_16T20_12_40.545634", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T20-12-40.545634.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-12-16T20-12-40.545634.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-12-16T20-12-40.545634.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T20-12-40.545634.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T20-12-40.545634.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-12-16T20-12-40.545634.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T20-12-40.545634.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T20-12-40.545634.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T20-12-40.545634.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T20-12-40.545634.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-12-16T20-12-40.545634.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-12-16T20-12-40.545634.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T20-12-40.545634.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-12-16T20-12-40.545634.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T20-12-40.545634.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T20-12-40.545634.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T20-12-40.545634.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-12-16T20-12-40.545634.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T20-12-40.545634.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T20-12-40.545634.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T20-12-40.545634.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T20-12-40.545634.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T20-12-40.545634.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T20-12-40.545634.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T20-12-40.545634.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T20-12-40.545634.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T20-12-40.545634.parquet", 
"**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T20-12-40.545634.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T20-12-40.545634.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T20-12-40.545634.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T20-12-40.545634.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T20-12-40.545634.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-12-16T20-12-40.545634.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T20-12-40.545634.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-12-16T20-12-40.545634.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T20-12-40.545634.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T20-12-40.545634.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T20-12-40.545634.parquet", "**/details_harness|hendrycksTest-management|5_2023-12-16T20-12-40.545634.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-12-16T20-12-40.545634.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T20-12-40.545634.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T20-12-40.545634.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T20-12-40.545634.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T20-12-40.545634.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-12-16T20-12-40.545634.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-12-16T20-12-40.545634.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-12-16T20-12-40.545634.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T20-12-40.545634.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-12-16T20-12-40.545634.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T20-12-40.545634.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T20-12-40.545634.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-12-16T20-12-40.545634.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-12-16T20-12-40.545634.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-12-16T20-12-40.545634.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T20-12-40.545634.parquet", "**/details_harness|hendrycksTest-virology|5_2023-12-16T20-12-40.545634.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-12-16T20-12-40.545634.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T20-12-40.545634.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-12-16T20-12-40.545634.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-12-16T20-12-40.545634.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T20-12-40.545634.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T20-12-40.545634.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-12-16T20-12-40.545634.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T20-12-40.545634.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T20-12-40.545634.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T20-12-40.545634.parquet", 
"**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T20-12-40.545634.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-12-16T20-12-40.545634.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-12-16T20-12-40.545634.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T20-12-40.545634.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-12-16T20-12-40.545634.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T20-12-40.545634.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T20-12-40.545634.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T20-12-40.545634.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-12-16T20-12-40.545634.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T20-12-40.545634.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T20-12-40.545634.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T20-12-40.545634.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T20-12-40.545634.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T20-12-40.545634.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T20-12-40.545634.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T20-12-40.545634.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T20-12-40.545634.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T20-12-40.545634.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T20-12-40.545634.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T20-12-40.545634.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T20-12-40.545634.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T20-12-40.545634.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T20-12-40.545634.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-12-16T20-12-40.545634.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T20-12-40.545634.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-12-16T20-12-40.545634.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T20-12-40.545634.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T20-12-40.545634.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T20-12-40.545634.parquet", "**/details_harness|hendrycksTest-management|5_2023-12-16T20-12-40.545634.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-12-16T20-12-40.545634.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T20-12-40.545634.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T20-12-40.545634.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T20-12-40.545634.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T20-12-40.545634.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-12-16T20-12-40.545634.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-12-16T20-12-40.545634.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-12-16T20-12-40.545634.parquet", 
"**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T20-12-40.545634.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-12-16T20-12-40.545634.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T20-12-40.545634.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T20-12-40.545634.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-12-16T20-12-40.545634.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-12-16T20-12-40.545634.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-12-16T20-12-40.545634.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T20-12-40.545634.parquet", "**/details_harness|hendrycksTest-virology|5_2023-12-16T20-12-40.545634.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-12-16T20-12-40.545634.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_12_16T20_12_40.545634", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T20-12-40.545634.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T20-12-40.545634.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_12_16T20_12_40.545634", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-12-16T20-12-40.545634.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-12-16T20-12-40.545634.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_12_16T20_12_40.545634", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-12-16T20-12-40.545634.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-12-16T20-12-40.545634.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_12_16T20_12_40.545634", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T20-12-40.545634.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T20-12-40.545634.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_12_16T20_12_40.545634", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T20-12-40.545634.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T20-12-40.545634.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_12_16T20_12_40.545634", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-12-16T20-12-40.545634.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-12-16T20-12-40.545634.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_12_16T20_12_40.545634", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T20-12-40.545634.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T20-12-40.545634.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_12_16T20_12_40.545634", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T20-12-40.545634.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T20-12-40.545634.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_12_16T20_12_40.545634", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T20-12-40.545634.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T20-12-40.545634.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_12_16T20_12_40.545634", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T20-12-40.545634.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T20-12-40.545634.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_12_16T20_12_40.545634", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-12-16T20-12-40.545634.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-12-16T20-12-40.545634.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_12_16T20_12_40.545634", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-12-16T20-12-40.545634.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-12-16T20-12-40.545634.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_12_16T20_12_40.545634", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T20-12-40.545634.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T20-12-40.545634.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_12_16T20_12_40.545634", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-12-16T20-12-40.545634.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-12-16T20-12-40.545634.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_12_16T20_12_40.545634", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T20-12-40.545634.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T20-12-40.545634.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_12_16T20_12_40.545634", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T20-12-40.545634.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T20-12-40.545634.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_12_16T20_12_40.545634", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T20-12-40.545634.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T20-12-40.545634.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_12_16T20_12_40.545634", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-12-16T20-12-40.545634.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-12-16T20-12-40.545634.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_12_16T20_12_40.545634", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T20-12-40.545634.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T20-12-40.545634.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_12_16T20_12_40.545634", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T20-12-40.545634.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T20-12-40.545634.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_12_16T20_12_40.545634", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T20-12-40.545634.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T20-12-40.545634.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_12_16T20_12_40.545634", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T20-12-40.545634.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T20-12-40.545634.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_12_16T20_12_40.545634", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T20-12-40.545634.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T20-12-40.545634.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_12_16T20_12_40.545634", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T20-12-40.545634.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T20-12-40.545634.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_12_16T20_12_40.545634", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T20-12-40.545634.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T20-12-40.545634.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_12_16T20_12_40.545634", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T20-12-40.545634.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T20-12-40.545634.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_12_16T20_12_40.545634", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T20-12-40.545634.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T20-12-40.545634.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_12_16T20_12_40.545634", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T20-12-40.545634.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T20-12-40.545634.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_12_16T20_12_40.545634", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T20-12-40.545634.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T20-12-40.545634.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_12_16T20_12_40.545634", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T20-12-40.545634.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T20-12-40.545634.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_12_16T20_12_40.545634", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T20-12-40.545634.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T20-12-40.545634.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_12_16T20_12_40.545634", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T20-12-40.545634.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T20-12-40.545634.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_12_16T20_12_40.545634", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-12-16T20-12-40.545634.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-12-16T20-12-40.545634.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_12_16T20_12_40.545634", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T20-12-40.545634.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T20-12-40.545634.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_12_16T20_12_40.545634", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-12-16T20-12-40.545634.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-12-16T20-12-40.545634.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_12_16T20_12_40.545634", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T20-12-40.545634.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T20-12-40.545634.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_12_16T20_12_40.545634", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T20-12-40.545634.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T20-12-40.545634.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_12_16T20_12_40.545634", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T20-12-40.545634.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T20-12-40.545634.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_12_16T20_12_40.545634", "path": ["**/details_harness|hendrycksTest-management|5_2023-12-16T20-12-40.545634.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-12-16T20-12-40.545634.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_12_16T20_12_40.545634", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-12-16T20-12-40.545634.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-12-16T20-12-40.545634.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_12_16T20_12_40.545634", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T20-12-40.545634.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T20-12-40.545634.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_12_16T20_12_40.545634", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T20-12-40.545634.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T20-12-40.545634.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_12_16T20_12_40.545634", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T20-12-40.545634.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T20-12-40.545634.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_12_16T20_12_40.545634", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T20-12-40.545634.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T20-12-40.545634.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_12_16T20_12_40.545634", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-12-16T20-12-40.545634.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-12-16T20-12-40.545634.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_12_16T20_12_40.545634", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-12-16T20-12-40.545634.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-12-16T20-12-40.545634.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_12_16T20_12_40.545634", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-12-16T20-12-40.545634.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-12-16T20-12-40.545634.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_12_16T20_12_40.545634", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T20-12-40.545634.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T20-12-40.545634.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_12_16T20_12_40.545634", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-12-16T20-12-40.545634.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2023-12-16T20-12-40.545634.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_12_16T20_12_40.545634", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T20-12-40.545634.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T20-12-40.545634.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_12_16T20_12_40.545634", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T20-12-40.545634.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T20-12-40.545634.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_12_16T20_12_40.545634", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-12-16T20-12-40.545634.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-12-16T20-12-40.545634.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_12_16T20_12_40.545634", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-12-16T20-12-40.545634.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-12-16T20-12-40.545634.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_12_16T20_12_40.545634", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-12-16T20-12-40.545634.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-12-16T20-12-40.545634.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_12_16T20_12_40.545634", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T20-12-40.545634.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T20-12-40.545634.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_12_16T20_12_40.545634", "path": ["**/details_harness|hendrycksTest-virology|5_2023-12-16T20-12-40.545634.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-12-16T20-12-40.545634.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_12_16T20_12_40.545634", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-12-16T20-12-40.545634.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-12-16T20-12-40.545634.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_12_16T20_12_40.545634", "path": ["**/details_harness|truthfulqa:mc|0_2023-12-16T20-12-40.545634.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-12-16T20-12-40.545634.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_12_16T20_12_40.545634", "path": ["**/details_harness|winogrande|5_2023-12-16T20-12-40.545634.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-12-16T20-12-40.545634.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_12_16T20_12_40.545634", "path": ["results_2023-12-16T20-12-40.545634.parquet"]}, {"split": "latest", "path": 
["results_2023-12-16T20-12-40.545634.parquet"]}]}]}
2023-12-16T20:16:15+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of mncai/mistral-7b-dpo-v6 Dataset automatically created during the evaluation run of model mncai/mistral-7b-dpo-v6 on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-12-16T20:12:40.545634 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
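The flattened card text above stops at "do the following:" without the loading snippet that normally accompanies it. A minimal sketch of that snippet, again assuming the standard `details_<org>__<model>` repo naming rather than a path stated in the text:

```python
from datasets import load_dataset

# Repo id assumed from the leaderboard's naming convention; it is not given in the flattened text.
data = load_dataset(
    "open-llm-leaderboard/details_mncai__mistral-7b-dpo-v6",
    "harness_winogrande_5",  # one of the 63 task configurations
    split="train",           # "train" always points to the latest results
)
```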
[ "# Dataset Card for Evaluation run of mncai/mistral-7b-dpo-v6\n\n\n\nDataset automatically created during the evaluation run of model mncai/mistral-7b-dpo-v6 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-12-16T20:12:40.545634(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of mncai/mistral-7b-dpo-v6\n\n\n\nDataset automatically created during the evaluation run of model mncai/mistral-7b-dpo-v6 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-12-16T20:12:40.545634(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 189, 67, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of mncai/mistral-7b-dpo-v6\n\n\n\nDataset automatically created during the evaluation run of model mncai/mistral-7b-dpo-v6 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-12-16T20:12:40.545634(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]## Dataset Card Authors [optional]## Dataset Card Contact" ]
919ba2873b012f882a3b8ce2a6e53a34bb27fe25
# Dataset Card for Evaluation run of GreenNode/GreenNodeLM-7B-v4leo <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [GreenNode/GreenNodeLM-7B-v4leo](https://huggingface.co/GreenNode/GreenNodeLM-7B-v4leo) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_GreenNode__GreenNodeLM-7B-v4leo", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-12-16T20:58:30.002770](https://huggingface.co/datasets/open-llm-leaderboard/details_GreenNode__GreenNodeLM-7B-v4leo/blob/main/results_2023-12-16T20-58-30.002770.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.6550149914285618, "acc_stderr": 0.03201980400748622, "acc_norm": 0.6550580572754082, "acc_norm_stderr": 0.03267824279371986, "mc1": 0.5483476132190942, "mc1_stderr": 0.01742148030027764, "mc2": 0.6965131744948723, "mc2_stderr": 0.01496885686799417 }, "harness|arc:challenge|25": { "acc": 0.6825938566552902, "acc_stderr": 0.013602239088038167, "acc_norm": 0.712457337883959, "acc_norm_stderr": 0.013226719056266127 }, "harness|hellaswag|10": { "acc": 0.7104162517426807, "acc_stderr": 0.004526422125860673, "acc_norm": 0.8823939454291974, "acc_norm_stderr": 0.0032148270694168255 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.35, "acc_stderr": 0.0479372485441102, "acc_norm": 0.35, "acc_norm_stderr": 0.0479372485441102 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.6666666666666666, "acc_stderr": 0.04072314811876837, "acc_norm": 0.6666666666666666, "acc_norm_stderr": 0.04072314811876837 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.6973684210526315, "acc_stderr": 0.037385206761196686, "acc_norm": 0.6973684210526315, "acc_norm_stderr": 0.037385206761196686 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.61, "acc_stderr": 0.04902071300001975, "acc_norm": 0.61, "acc_norm_stderr": 0.04902071300001975 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.720754716981132, "acc_stderr": 0.027611163402399715, "acc_norm": 0.720754716981132, "acc_norm_stderr": 0.027611163402399715 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.7638888888888888, "acc_stderr": 0.03551446610810826, "acc_norm": 0.7638888888888888, "acc_norm_stderr": 0.03551446610810826 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.46, "acc_stderr": 0.05009082659620333, "acc_norm": 0.46, "acc_norm_stderr": 0.05009082659620333 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.55, "acc_stderr": 0.05, "acc_norm": 0.55, "acc_norm_stderr": 0.05 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 
0.29, "acc_stderr": 0.04560480215720684, "acc_norm": 0.29, "acc_norm_stderr": 0.04560480215720684 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.6705202312138728, "acc_stderr": 0.03583901754736412, "acc_norm": 0.6705202312138728, "acc_norm_stderr": 0.03583901754736412 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.43137254901960786, "acc_stderr": 0.04928099597287534, "acc_norm": 0.43137254901960786, "acc_norm_stderr": 0.04928099597287534 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.78, "acc_stderr": 0.04163331998932263, "acc_norm": 0.78, "acc_norm_stderr": 0.04163331998932263 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.574468085106383, "acc_stderr": 0.03232146916224468, "acc_norm": 0.574468085106383, "acc_norm_stderr": 0.03232146916224468 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.49122807017543857, "acc_stderr": 0.04702880432049615, "acc_norm": 0.49122807017543857, "acc_norm_stderr": 0.04702880432049615 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5793103448275863, "acc_stderr": 0.0411391498118926, "acc_norm": 0.5793103448275863, "acc_norm_stderr": 0.0411391498118926 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.42328042328042326, "acc_stderr": 0.025446365634406783, "acc_norm": 0.42328042328042326, "acc_norm_stderr": 0.025446365634406783 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.46825396825396826, "acc_stderr": 0.04463112720677172, "acc_norm": 0.46825396825396826, "acc_norm_stderr": 0.04463112720677172 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.32, "acc_stderr": 0.04688261722621504, "acc_norm": 0.32, "acc_norm_stderr": 0.04688261722621504 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.7870967741935484, "acc_stderr": 0.023287665127268542, "acc_norm": 0.7870967741935484, "acc_norm_stderr": 0.023287665127268542 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.5123152709359606, "acc_stderr": 0.035169204442208966, "acc_norm": 0.5123152709359606, "acc_norm_stderr": 0.035169204442208966 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.72, "acc_stderr": 0.04512608598542127, "acc_norm": 0.72, "acc_norm_stderr": 0.04512608598542127 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.7878787878787878, "acc_stderr": 0.031922715695483, "acc_norm": 0.7878787878787878, "acc_norm_stderr": 0.031922715695483 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.7828282828282829, "acc_stderr": 0.02937661648494563, "acc_norm": 0.7828282828282829, "acc_norm_stderr": 0.02937661648494563 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.8860103626943006, "acc_stderr": 0.022935144053919436, "acc_norm": 0.8860103626943006, "acc_norm_stderr": 0.022935144053919436 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.6871794871794872, "acc_stderr": 0.023507579020645358, "acc_norm": 0.6871794871794872, "acc_norm_stderr": 0.023507579020645358 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.337037037037037, "acc_stderr": 0.02882088466625326, "acc_norm": 0.337037037037037, "acc_norm_stderr": 0.02882088466625326 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.6890756302521008, "acc_stderr": 0.03006676158297793, "acc_norm": 0.6890756302521008, "acc_norm_stderr": 0.03006676158297793 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.3443708609271523, "acc_stderr": 0.038796870240733264, "acc_norm": 0.3443708609271523, "acc_norm_stderr": 
0.038796870240733264 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8458715596330275, "acc_stderr": 0.015480826865374303, "acc_norm": 0.8458715596330275, "acc_norm_stderr": 0.015480826865374303 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.5277777777777778, "acc_stderr": 0.0340470532865388, "acc_norm": 0.5277777777777778, "acc_norm_stderr": 0.0340470532865388 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.8333333333333334, "acc_stderr": 0.02615686752393104, "acc_norm": 0.8333333333333334, "acc_norm_stderr": 0.02615686752393104 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.8059071729957806, "acc_stderr": 0.025744902532290913, "acc_norm": 0.8059071729957806, "acc_norm_stderr": 0.025744902532290913 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.6771300448430493, "acc_stderr": 0.031381476375754995, "acc_norm": 0.6771300448430493, "acc_norm_stderr": 0.031381476375754995 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.8015267175572519, "acc_stderr": 0.03498149385462472, "acc_norm": 0.8015267175572519, "acc_norm_stderr": 0.03498149385462472 }, "harness|hendrycksTest-international_law|5": { "acc": 0.7851239669421488, "acc_stderr": 0.037494924487096966, "acc_norm": 0.7851239669421488, "acc_norm_stderr": 0.037494924487096966 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.7870370370370371, "acc_stderr": 0.0395783547198098, "acc_norm": 0.7870370370370371, "acc_norm_stderr": 0.0395783547198098 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.7668711656441718, "acc_stderr": 0.0332201579577674, "acc_norm": 0.7668711656441718, "acc_norm_stderr": 0.0332201579577674 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.4732142857142857, "acc_stderr": 0.047389751192741546, "acc_norm": 0.4732142857142857, "acc_norm_stderr": 0.047389751192741546 }, "harness|hendrycksTest-management|5": { "acc": 0.7669902912621359, "acc_stderr": 0.04185832598928315, "acc_norm": 0.7669902912621359, "acc_norm_stderr": 0.04185832598928315 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8717948717948718, "acc_stderr": 0.02190190511507333, "acc_norm": 0.8717948717948718, "acc_norm_stderr": 0.02190190511507333 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.71, "acc_stderr": 0.045604802157206845, "acc_norm": 0.71, "acc_norm_stderr": 0.045604802157206845 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.8339719029374202, "acc_stderr": 0.0133064782430663, "acc_norm": 0.8339719029374202, "acc_norm_stderr": 0.0133064782430663 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.7254335260115607, "acc_stderr": 0.024027745155265026, "acc_norm": 0.7254335260115607, "acc_norm_stderr": 0.024027745155265026 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.4770949720670391, "acc_stderr": 0.016704945740326185, "acc_norm": 0.4770949720670391, "acc_norm_stderr": 0.016704945740326185 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.7058823529411765, "acc_stderr": 0.026090162504279053, "acc_norm": 0.7058823529411765, "acc_norm_stderr": 0.026090162504279053 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.7266881028938906, "acc_stderr": 0.02531176597542612, "acc_norm": 0.7266881028938906, "acc_norm_stderr": 0.02531176597542612 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.7561728395061729, "acc_stderr": 0.02389187954195961, "acc_norm": 0.7561728395061729, "acc_norm_stderr": 0.02389187954195961 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.475177304964539, "acc_stderr": 
0.029790719243829727, "acc_norm": 0.475177304964539, "acc_norm_stderr": 0.029790719243829727 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.46740547588005216, "acc_stderr": 0.01274307294265335, "acc_norm": 0.46740547588005216, "acc_norm_stderr": 0.01274307294265335 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.6801470588235294, "acc_stderr": 0.02833295951403121, "acc_norm": 0.6801470588235294, "acc_norm_stderr": 0.02833295951403121 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.6764705882352942, "acc_stderr": 0.018926082916083383, "acc_norm": 0.6764705882352942, "acc_norm_stderr": 0.018926082916083383 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.7, "acc_stderr": 0.04389311454644287, "acc_norm": 0.7, "acc_norm_stderr": 0.04389311454644287 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.7224489795918367, "acc_stderr": 0.028666857790274648, "acc_norm": 0.7224489795918367, "acc_norm_stderr": 0.028666857790274648 }, "harness|hendrycksTest-sociology|5": { "acc": 0.8407960199004975, "acc_stderr": 0.02587064676616913, "acc_norm": 0.8407960199004975, "acc_norm_stderr": 0.02587064676616913 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.86, "acc_stderr": 0.03487350880197769, "acc_norm": 0.86, "acc_norm_stderr": 0.03487350880197769 }, "harness|hendrycksTest-virology|5": { "acc": 0.536144578313253, "acc_stderr": 0.038823108508905954, "acc_norm": 0.536144578313253, "acc_norm_stderr": 0.038823108508905954 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8362573099415205, "acc_stderr": 0.028380919596145866, "acc_norm": 0.8362573099415205, "acc_norm_stderr": 0.028380919596145866 }, "harness|truthfulqa:mc|0": { "mc1": 0.5483476132190942, "mc1_stderr": 0.01742148030027764, "mc2": 0.6965131744948723, "mc2_stderr": 0.01496885686799417 }, "harness|winogrande|5": { "acc": 0.8232044198895028, "acc_stderr": 0.010721923287918744 }, "harness|gsm8k|5": { "acc": 0.686125852918878, "acc_stderr": 0.012782681251053198 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). 
--> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
open-llm-leaderboard/details_GreenNode__GreenNodeLM-7B-v4leo
[ "region:us" ]
2023-12-16T20:24:56+00:00
{"pretty_name": "Evaluation run of GreenNode/GreenNodeLM-7B-v4leo", "dataset_summary": "Dataset automatically created during the evaluation run of model [GreenNode/GreenNodeLM-7B-v4leo](https://huggingface.co/GreenNode/GreenNodeLM-7B-v4leo) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_GreenNode__GreenNodeLM-7B-v4leo\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-12-16T20:58:30.002770](https://huggingface.co/datasets/open-llm-leaderboard/details_GreenNode__GreenNodeLM-7B-v4leo/blob/main/results_2023-12-16T20-58-30.002770.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6550149914285618,\n \"acc_stderr\": 0.03201980400748622,\n \"acc_norm\": 0.6550580572754082,\n \"acc_norm_stderr\": 0.03267824279371986,\n \"mc1\": 0.5483476132190942,\n \"mc1_stderr\": 0.01742148030027764,\n \"mc2\": 0.6965131744948723,\n \"mc2_stderr\": 0.01496885686799417\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.6825938566552902,\n \"acc_stderr\": 0.013602239088038167,\n \"acc_norm\": 0.712457337883959,\n \"acc_norm_stderr\": 0.013226719056266127\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.7104162517426807,\n \"acc_stderr\": 0.004526422125860673,\n \"acc_norm\": 0.8823939454291974,\n \"acc_norm_stderr\": 0.0032148270694168255\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.35,\n \"acc_stderr\": 0.0479372485441102,\n \"acc_norm\": 0.35,\n \"acc_norm_stderr\": 0.0479372485441102\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6666666666666666,\n \"acc_stderr\": 0.04072314811876837,\n \"acc_norm\": 0.6666666666666666,\n \"acc_norm_stderr\": 0.04072314811876837\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.6973684210526315,\n \"acc_stderr\": 0.037385206761196686,\n \"acc_norm\": 0.6973684210526315,\n \"acc_norm_stderr\": 0.037385206761196686\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.61,\n \"acc_stderr\": 0.04902071300001975,\n \"acc_norm\": 0.61,\n \"acc_norm_stderr\": 0.04902071300001975\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.720754716981132,\n \"acc_stderr\": 0.027611163402399715,\n \"acc_norm\": 0.720754716981132,\n \"acc_norm_stderr\": 0.027611163402399715\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7638888888888888,\n \"acc_stderr\": 0.03551446610810826,\n \"acc_norm\": 0.7638888888888888,\n \"acc_norm_stderr\": 0.03551446610810826\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.46,\n \"acc_stderr\": 0.05009082659620333,\n 
\"acc_norm\": 0.46,\n \"acc_norm_stderr\": 0.05009082659620333\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.55,\n \"acc_stderr\": 0.05,\n \"acc_norm\": 0.55,\n \"acc_norm_stderr\": 0.05\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.29,\n \"acc_stderr\": 0.04560480215720684,\n \"acc_norm\": 0.29,\n \"acc_norm_stderr\": 0.04560480215720684\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6705202312138728,\n \"acc_stderr\": 0.03583901754736412,\n \"acc_norm\": 0.6705202312138728,\n \"acc_norm_stderr\": 0.03583901754736412\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.43137254901960786,\n \"acc_stderr\": 0.04928099597287534,\n \"acc_norm\": 0.43137254901960786,\n \"acc_norm_stderr\": 0.04928099597287534\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.78,\n \"acc_stderr\": 0.04163331998932263,\n \"acc_norm\": 0.78,\n \"acc_norm_stderr\": 0.04163331998932263\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.574468085106383,\n \"acc_stderr\": 0.03232146916224468,\n \"acc_norm\": 0.574468085106383,\n \"acc_norm_stderr\": 0.03232146916224468\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.49122807017543857,\n \"acc_stderr\": 0.04702880432049615,\n \"acc_norm\": 0.49122807017543857,\n \"acc_norm_stderr\": 0.04702880432049615\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5793103448275863,\n \"acc_stderr\": 0.0411391498118926,\n \"acc_norm\": 0.5793103448275863,\n \"acc_norm_stderr\": 0.0411391498118926\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.42328042328042326,\n \"acc_stderr\": 0.025446365634406783,\n \"acc_norm\": 0.42328042328042326,\n \"acc_norm_stderr\": 0.025446365634406783\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.46825396825396826,\n \"acc_stderr\": 0.04463112720677172,\n \"acc_norm\": 0.46825396825396826,\n \"acc_norm_stderr\": 0.04463112720677172\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.32,\n \"acc_stderr\": 0.04688261722621504,\n \"acc_norm\": 0.32,\n \"acc_norm_stderr\": 0.04688261722621504\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7870967741935484,\n \"acc_stderr\": 0.023287665127268542,\n \"acc_norm\": 0.7870967741935484,\n \"acc_norm_stderr\": 0.023287665127268542\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.5123152709359606,\n \"acc_stderr\": 0.035169204442208966,\n \"acc_norm\": 0.5123152709359606,\n \"acc_norm_stderr\": 0.035169204442208966\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.72,\n \"acc_stderr\": 0.04512608598542127,\n \"acc_norm\": 0.72,\n \"acc_norm_stderr\": 0.04512608598542127\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7878787878787878,\n \"acc_stderr\": 0.031922715695483,\n \"acc_norm\": 0.7878787878787878,\n \"acc_norm_stderr\": 0.031922715695483\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.7828282828282829,\n \"acc_stderr\": 0.02937661648494563,\n \"acc_norm\": 0.7828282828282829,\n \"acc_norm_stderr\": 0.02937661648494563\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.8860103626943006,\n \"acc_stderr\": 0.022935144053919436,\n \"acc_norm\": 0.8860103626943006,\n \"acc_norm_stderr\": 0.022935144053919436\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.6871794871794872,\n \"acc_stderr\": 
0.023507579020645358,\n \"acc_norm\": 0.6871794871794872,\n \"acc_norm_stderr\": 0.023507579020645358\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.337037037037037,\n \"acc_stderr\": 0.02882088466625326,\n \"acc_norm\": 0.337037037037037,\n \"acc_norm_stderr\": 0.02882088466625326\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.6890756302521008,\n \"acc_stderr\": 0.03006676158297793,\n \"acc_norm\": 0.6890756302521008,\n \"acc_norm_stderr\": 0.03006676158297793\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.3443708609271523,\n \"acc_stderr\": 0.038796870240733264,\n \"acc_norm\": 0.3443708609271523,\n \"acc_norm_stderr\": 0.038796870240733264\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8458715596330275,\n \"acc_stderr\": 0.015480826865374303,\n \"acc_norm\": 0.8458715596330275,\n \"acc_norm_stderr\": 0.015480826865374303\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.5277777777777778,\n \"acc_stderr\": 0.0340470532865388,\n \"acc_norm\": 0.5277777777777778,\n \"acc_norm_stderr\": 0.0340470532865388\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.8333333333333334,\n \"acc_stderr\": 0.02615686752393104,\n \"acc_norm\": 0.8333333333333334,\n \"acc_norm_stderr\": 0.02615686752393104\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.8059071729957806,\n \"acc_stderr\": 0.025744902532290913,\n \"acc_norm\": 0.8059071729957806,\n \"acc_norm_stderr\": 0.025744902532290913\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6771300448430493,\n \"acc_stderr\": 0.031381476375754995,\n \"acc_norm\": 0.6771300448430493,\n \"acc_norm_stderr\": 0.031381476375754995\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.8015267175572519,\n \"acc_stderr\": 0.03498149385462472,\n \"acc_norm\": 0.8015267175572519,\n \"acc_norm_stderr\": 0.03498149385462472\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.7851239669421488,\n \"acc_stderr\": 0.037494924487096966,\n \"acc_norm\": 0.7851239669421488,\n \"acc_norm_stderr\": 0.037494924487096966\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7870370370370371,\n \"acc_stderr\": 0.0395783547198098,\n \"acc_norm\": 0.7870370370370371,\n \"acc_norm_stderr\": 0.0395783547198098\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7668711656441718,\n \"acc_stderr\": 0.0332201579577674,\n \"acc_norm\": 0.7668711656441718,\n \"acc_norm_stderr\": 0.0332201579577674\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.4732142857142857,\n \"acc_stderr\": 0.047389751192741546,\n \"acc_norm\": 0.4732142857142857,\n \"acc_norm_stderr\": 0.047389751192741546\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7669902912621359,\n \"acc_stderr\": 0.04185832598928315,\n \"acc_norm\": 0.7669902912621359,\n \"acc_norm_stderr\": 0.04185832598928315\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8717948717948718,\n \"acc_stderr\": 0.02190190511507333,\n \"acc_norm\": 0.8717948717948718,\n \"acc_norm_stderr\": 0.02190190511507333\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.71,\n \"acc_stderr\": 0.045604802157206845,\n \"acc_norm\": 0.71,\n \"acc_norm_stderr\": 0.045604802157206845\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.8339719029374202,\n \"acc_stderr\": 0.0133064782430663,\n \"acc_norm\": 0.8339719029374202,\n \"acc_norm_stderr\": 
0.0133064782430663\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.7254335260115607,\n \"acc_stderr\": 0.024027745155265026,\n \"acc_norm\": 0.7254335260115607,\n \"acc_norm_stderr\": 0.024027745155265026\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.4770949720670391,\n \"acc_stderr\": 0.016704945740326185,\n \"acc_norm\": 0.4770949720670391,\n \"acc_norm_stderr\": 0.016704945740326185\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.7058823529411765,\n \"acc_stderr\": 0.026090162504279053,\n \"acc_norm\": 0.7058823529411765,\n \"acc_norm_stderr\": 0.026090162504279053\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.7266881028938906,\n \"acc_stderr\": 0.02531176597542612,\n \"acc_norm\": 0.7266881028938906,\n \"acc_norm_stderr\": 0.02531176597542612\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.7561728395061729,\n \"acc_stderr\": 0.02389187954195961,\n \"acc_norm\": 0.7561728395061729,\n \"acc_norm_stderr\": 0.02389187954195961\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.475177304964539,\n \"acc_stderr\": 0.029790719243829727,\n \"acc_norm\": 0.475177304964539,\n \"acc_norm_stderr\": 0.029790719243829727\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.46740547588005216,\n \"acc_stderr\": 0.01274307294265335,\n \"acc_norm\": 0.46740547588005216,\n \"acc_norm_stderr\": 0.01274307294265335\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.6801470588235294,\n \"acc_stderr\": 0.02833295951403121,\n \"acc_norm\": 0.6801470588235294,\n \"acc_norm_stderr\": 0.02833295951403121\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6764705882352942,\n \"acc_stderr\": 0.018926082916083383,\n \"acc_norm\": 0.6764705882352942,\n \"acc_norm_stderr\": 0.018926082916083383\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.7,\n \"acc_stderr\": 0.04389311454644287,\n \"acc_norm\": 0.7,\n \"acc_norm_stderr\": 0.04389311454644287\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.7224489795918367,\n \"acc_stderr\": 0.028666857790274648,\n \"acc_norm\": 0.7224489795918367,\n \"acc_norm_stderr\": 0.028666857790274648\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8407960199004975,\n \"acc_stderr\": 0.02587064676616913,\n \"acc_norm\": 0.8407960199004975,\n \"acc_norm_stderr\": 0.02587064676616913\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.86,\n \"acc_stderr\": 0.03487350880197769,\n \"acc_norm\": 0.86,\n \"acc_norm_stderr\": 0.03487350880197769\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.536144578313253,\n \"acc_stderr\": 0.038823108508905954,\n \"acc_norm\": 0.536144578313253,\n \"acc_norm_stderr\": 0.038823108508905954\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8362573099415205,\n \"acc_stderr\": 0.028380919596145866,\n \"acc_norm\": 0.8362573099415205,\n \"acc_norm_stderr\": 0.028380919596145866\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.5483476132190942,\n \"mc1_stderr\": 0.01742148030027764,\n \"mc2\": 0.6965131744948723,\n \"mc2_stderr\": 0.01496885686799417\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.8232044198895028,\n \"acc_stderr\": 0.010721923287918744\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.686125852918878,\n \"acc_stderr\": 0.012782681251053198\n }\n}\n```", "repo_url": "https://huggingface.co/GreenNode/GreenNodeLM-7B-v4leo", "leaderboard_url": 
"https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_12_16T20_22_05.730511", "path": ["**/details_harness|arc:challenge|25_2023-12-16T20-22-05.730511.parquet"]}, {"split": "2023_12_16T20_58_30.002770", "path": ["**/details_harness|arc:challenge|25_2023-12-16T20-58-30.002770.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-12-16T20-58-30.002770.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_12_16T20_22_05.730511", "path": ["**/details_harness|gsm8k|5_2023-12-16T20-22-05.730511.parquet"]}, {"split": "2023_12_16T20_58_30.002770", "path": ["**/details_harness|gsm8k|5_2023-12-16T20-58-30.002770.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-12-16T20-58-30.002770.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_12_16T20_22_05.730511", "path": ["**/details_harness|hellaswag|10_2023-12-16T20-22-05.730511.parquet"]}, {"split": "2023_12_16T20_58_30.002770", "path": ["**/details_harness|hellaswag|10_2023-12-16T20-58-30.002770.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-12-16T20-58-30.002770.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_12_16T20_22_05.730511", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T20-22-05.730511.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-12-16T20-22-05.730511.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-12-16T20-22-05.730511.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T20-22-05.730511.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T20-22-05.730511.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-12-16T20-22-05.730511.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T20-22-05.730511.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T20-22-05.730511.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T20-22-05.730511.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T20-22-05.730511.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-12-16T20-22-05.730511.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-12-16T20-22-05.730511.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T20-22-05.730511.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-12-16T20-22-05.730511.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T20-22-05.730511.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T20-22-05.730511.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T20-22-05.730511.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-12-16T20-22-05.730511.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T20-22-05.730511.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T20-22-05.730511.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T20-22-05.730511.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T20-22-05.730511.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T20-22-05.730511.parquet", 
"**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T20-22-05.730511.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T20-22-05.730511.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T20-22-05.730511.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T20-22-05.730511.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T20-22-05.730511.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T20-22-05.730511.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T20-22-05.730511.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T20-22-05.730511.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T20-22-05.730511.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-12-16T20-22-05.730511.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T20-22-05.730511.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-12-16T20-22-05.730511.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T20-22-05.730511.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T20-22-05.730511.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T20-22-05.730511.parquet", "**/details_harness|hendrycksTest-management|5_2023-12-16T20-22-05.730511.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-12-16T20-22-05.730511.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T20-22-05.730511.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T20-22-05.730511.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T20-22-05.730511.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T20-22-05.730511.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-12-16T20-22-05.730511.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-12-16T20-22-05.730511.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-12-16T20-22-05.730511.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T20-22-05.730511.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-12-16T20-22-05.730511.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T20-22-05.730511.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T20-22-05.730511.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-12-16T20-22-05.730511.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-12-16T20-22-05.730511.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-12-16T20-22-05.730511.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T20-22-05.730511.parquet", "**/details_harness|hendrycksTest-virology|5_2023-12-16T20-22-05.730511.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-12-16T20-22-05.730511.parquet"]}, {"split": "2023_12_16T20_58_30.002770", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T20-58-30.002770.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-12-16T20-58-30.002770.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-12-16T20-58-30.002770.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T20-58-30.002770.parquet", 
"**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T20-58-30.002770.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-12-16T20-58-30.002770.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T20-58-30.002770.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T20-58-30.002770.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T20-58-30.002770.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T20-58-30.002770.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-12-16T20-58-30.002770.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-12-16T20-58-30.002770.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T20-58-30.002770.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-12-16T20-58-30.002770.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T20-58-30.002770.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T20-58-30.002770.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T20-58-30.002770.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-12-16T20-58-30.002770.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T20-58-30.002770.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T20-58-30.002770.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T20-58-30.002770.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T20-58-30.002770.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T20-58-30.002770.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T20-58-30.002770.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T20-58-30.002770.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T20-58-30.002770.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T20-58-30.002770.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T20-58-30.002770.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T20-58-30.002770.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T20-58-30.002770.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T20-58-30.002770.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T20-58-30.002770.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-12-16T20-58-30.002770.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T20-58-30.002770.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-12-16T20-58-30.002770.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T20-58-30.002770.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T20-58-30.002770.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T20-58-30.002770.parquet", "**/details_harness|hendrycksTest-management|5_2023-12-16T20-58-30.002770.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-12-16T20-58-30.002770.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T20-58-30.002770.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T20-58-30.002770.parquet", 
"**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T20-58-30.002770.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T20-58-30.002770.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-12-16T20-58-30.002770.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-12-16T20-58-30.002770.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-12-16T20-58-30.002770.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T20-58-30.002770.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-12-16T20-58-30.002770.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T20-58-30.002770.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T20-58-30.002770.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-12-16T20-58-30.002770.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-12-16T20-58-30.002770.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-12-16T20-58-30.002770.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T20-58-30.002770.parquet", "**/details_harness|hendrycksTest-virology|5_2023-12-16T20-58-30.002770.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-12-16T20-58-30.002770.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T20-58-30.002770.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-12-16T20-58-30.002770.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-12-16T20-58-30.002770.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T20-58-30.002770.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T20-58-30.002770.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-12-16T20-58-30.002770.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T20-58-30.002770.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T20-58-30.002770.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T20-58-30.002770.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T20-58-30.002770.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-12-16T20-58-30.002770.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-12-16T20-58-30.002770.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T20-58-30.002770.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-12-16T20-58-30.002770.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T20-58-30.002770.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T20-58-30.002770.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T20-58-30.002770.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-12-16T20-58-30.002770.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T20-58-30.002770.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T20-58-30.002770.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T20-58-30.002770.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T20-58-30.002770.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T20-58-30.002770.parquet", 
"**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T20-58-30.002770.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T20-58-30.002770.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T20-58-30.002770.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T20-58-30.002770.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T20-58-30.002770.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T20-58-30.002770.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T20-58-30.002770.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T20-58-30.002770.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T20-58-30.002770.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-12-16T20-58-30.002770.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T20-58-30.002770.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-12-16T20-58-30.002770.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T20-58-30.002770.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T20-58-30.002770.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T20-58-30.002770.parquet", "**/details_harness|hendrycksTest-management|5_2023-12-16T20-58-30.002770.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-12-16T20-58-30.002770.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T20-58-30.002770.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T20-58-30.002770.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T20-58-30.002770.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T20-58-30.002770.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-12-16T20-58-30.002770.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-12-16T20-58-30.002770.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-12-16T20-58-30.002770.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T20-58-30.002770.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-12-16T20-58-30.002770.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T20-58-30.002770.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T20-58-30.002770.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-12-16T20-58-30.002770.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-12-16T20-58-30.002770.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-12-16T20-58-30.002770.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T20-58-30.002770.parquet", "**/details_harness|hendrycksTest-virology|5_2023-12-16T20-58-30.002770.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-12-16T20-58-30.002770.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_12_16T20_22_05.730511", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T20-22-05.730511.parquet"]}, {"split": "2023_12_16T20_58_30.002770", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T20-58-30.002770.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T20-58-30.002770.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_12_16T20_22_05.730511", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-12-16T20-22-05.730511.parquet"]}, {"split": "2023_12_16T20_58_30.002770", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-12-16T20-58-30.002770.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-12-16T20-58-30.002770.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_12_16T20_22_05.730511", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-12-16T20-22-05.730511.parquet"]}, {"split": "2023_12_16T20_58_30.002770", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-12-16T20-58-30.002770.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-12-16T20-58-30.002770.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_12_16T20_22_05.730511", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T20-22-05.730511.parquet"]}, {"split": "2023_12_16T20_58_30.002770", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T20-58-30.002770.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T20-58-30.002770.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_12_16T20_22_05.730511", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T20-22-05.730511.parquet"]}, {"split": "2023_12_16T20_58_30.002770", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T20-58-30.002770.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T20-58-30.002770.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_12_16T20_22_05.730511", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-12-16T20-22-05.730511.parquet"]}, {"split": "2023_12_16T20_58_30.002770", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-12-16T20-58-30.002770.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-12-16T20-58-30.002770.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_12_16T20_22_05.730511", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T20-22-05.730511.parquet"]}, {"split": "2023_12_16T20_58_30.002770", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T20-58-30.002770.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T20-58-30.002770.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_12_16T20_22_05.730511", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T20-22-05.730511.parquet"]}, {"split": "2023_12_16T20_58_30.002770", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T20-58-30.002770.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T20-58-30.002770.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_12_16T20_22_05.730511", "path": 
["**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T20-22-05.730511.parquet"]}, {"split": "2023_12_16T20_58_30.002770", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T20-58-30.002770.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T20-58-30.002770.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_12_16T20_22_05.730511", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T20-22-05.730511.parquet"]}, {"split": "2023_12_16T20_58_30.002770", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T20-58-30.002770.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T20-58-30.002770.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_12_16T20_22_05.730511", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-12-16T20-22-05.730511.parquet"]}, {"split": "2023_12_16T20_58_30.002770", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-12-16T20-58-30.002770.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-12-16T20-58-30.002770.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_12_16T20_22_05.730511", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-12-16T20-22-05.730511.parquet"]}, {"split": "2023_12_16T20_58_30.002770", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-12-16T20-58-30.002770.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-12-16T20-58-30.002770.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_12_16T20_22_05.730511", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T20-22-05.730511.parquet"]}, {"split": "2023_12_16T20_58_30.002770", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T20-58-30.002770.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T20-58-30.002770.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_12_16T20_22_05.730511", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-12-16T20-22-05.730511.parquet"]}, {"split": "2023_12_16T20_58_30.002770", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-12-16T20-58-30.002770.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-12-16T20-58-30.002770.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_12_16T20_22_05.730511", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T20-22-05.730511.parquet"]}, {"split": "2023_12_16T20_58_30.002770", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T20-58-30.002770.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T20-58-30.002770.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_12_16T20_22_05.730511", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T20-22-05.730511.parquet"]}, {"split": "2023_12_16T20_58_30.002770", "path": 
["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T20-58-30.002770.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T20-58-30.002770.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_12_16T20_22_05.730511", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T20-22-05.730511.parquet"]}, {"split": "2023_12_16T20_58_30.002770", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T20-58-30.002770.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T20-58-30.002770.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_12_16T20_22_05.730511", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-12-16T20-22-05.730511.parquet"]}, {"split": "2023_12_16T20_58_30.002770", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-12-16T20-58-30.002770.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-12-16T20-58-30.002770.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_12_16T20_22_05.730511", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T20-22-05.730511.parquet"]}, {"split": "2023_12_16T20_58_30.002770", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T20-58-30.002770.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T20-58-30.002770.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_12_16T20_22_05.730511", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T20-22-05.730511.parquet"]}, {"split": "2023_12_16T20_58_30.002770", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T20-58-30.002770.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T20-58-30.002770.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_12_16T20_22_05.730511", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T20-22-05.730511.parquet"]}, {"split": "2023_12_16T20_58_30.002770", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T20-58-30.002770.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T20-58-30.002770.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_12_16T20_22_05.730511", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T20-22-05.730511.parquet"]}, {"split": "2023_12_16T20_58_30.002770", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T20-58-30.002770.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T20-58-30.002770.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_12_16T20_22_05.730511", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T20-22-05.730511.parquet"]}, {"split": "2023_12_16T20_58_30.002770", "path": 
["**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T20-58-30.002770.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T20-58-30.002770.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_12_16T20_22_05.730511", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T20-22-05.730511.parquet"]}, {"split": "2023_12_16T20_58_30.002770", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T20-58-30.002770.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T20-58-30.002770.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_12_16T20_22_05.730511", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T20-22-05.730511.parquet"]}, {"split": "2023_12_16T20_58_30.002770", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T20-58-30.002770.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T20-58-30.002770.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_12_16T20_22_05.730511", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T20-22-05.730511.parquet"]}, {"split": "2023_12_16T20_58_30.002770", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T20-58-30.002770.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T20-58-30.002770.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_12_16T20_22_05.730511", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T20-22-05.730511.parquet"]}, {"split": "2023_12_16T20_58_30.002770", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T20-58-30.002770.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T20-58-30.002770.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_12_16T20_22_05.730511", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T20-22-05.730511.parquet"]}, {"split": "2023_12_16T20_58_30.002770", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T20-58-30.002770.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T20-58-30.002770.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_12_16T20_22_05.730511", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T20-22-05.730511.parquet"]}, {"split": "2023_12_16T20_58_30.002770", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T20-58-30.002770.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T20-58-30.002770.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_12_16T20_22_05.730511", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T20-22-05.730511.parquet"]}, 
{"split": "2023_12_16T20_58_30.002770", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T20-58-30.002770.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T20-58-30.002770.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_12_16T20_22_05.730511", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T20-22-05.730511.parquet"]}, {"split": "2023_12_16T20_58_30.002770", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T20-58-30.002770.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T20-58-30.002770.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_12_16T20_22_05.730511", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T20-22-05.730511.parquet"]}, {"split": "2023_12_16T20_58_30.002770", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T20-58-30.002770.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T20-58-30.002770.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_12_16T20_22_05.730511", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-12-16T20-22-05.730511.parquet"]}, {"split": "2023_12_16T20_58_30.002770", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-12-16T20-58-30.002770.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-12-16T20-58-30.002770.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_12_16T20_22_05.730511", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T20-22-05.730511.parquet"]}, {"split": "2023_12_16T20_58_30.002770", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T20-58-30.002770.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T20-58-30.002770.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_12_16T20_22_05.730511", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-12-16T20-22-05.730511.parquet"]}, {"split": "2023_12_16T20_58_30.002770", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-12-16T20-58-30.002770.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-12-16T20-58-30.002770.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_12_16T20_22_05.730511", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T20-22-05.730511.parquet"]}, {"split": "2023_12_16T20_58_30.002770", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T20-58-30.002770.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T20-58-30.002770.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_12_16T20_22_05.730511", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T20-22-05.730511.parquet"]}, {"split": "2023_12_16T20_58_30.002770", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T20-58-30.002770.parquet"]}, {"split": 
"latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T20-58-30.002770.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_12_16T20_22_05.730511", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T20-22-05.730511.parquet"]}, {"split": "2023_12_16T20_58_30.002770", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T20-58-30.002770.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T20-58-30.002770.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_12_16T20_22_05.730511", "path": ["**/details_harness|hendrycksTest-management|5_2023-12-16T20-22-05.730511.parquet"]}, {"split": "2023_12_16T20_58_30.002770", "path": ["**/details_harness|hendrycksTest-management|5_2023-12-16T20-58-30.002770.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-12-16T20-58-30.002770.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_12_16T20_22_05.730511", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-12-16T20-22-05.730511.parquet"]}, {"split": "2023_12_16T20_58_30.002770", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-12-16T20-58-30.002770.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-12-16T20-58-30.002770.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_12_16T20_22_05.730511", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T20-22-05.730511.parquet"]}, {"split": "2023_12_16T20_58_30.002770", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T20-58-30.002770.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T20-58-30.002770.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_12_16T20_22_05.730511", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T20-22-05.730511.parquet"]}, {"split": "2023_12_16T20_58_30.002770", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T20-58-30.002770.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T20-58-30.002770.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_12_16T20_22_05.730511", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T20-22-05.730511.parquet"]}, {"split": "2023_12_16T20_58_30.002770", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T20-58-30.002770.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T20-58-30.002770.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_12_16T20_22_05.730511", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T20-22-05.730511.parquet"]}, {"split": "2023_12_16T20_58_30.002770", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T20-58-30.002770.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T20-58-30.002770.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_12_16T20_22_05.730511", "path": 
["**/details_harness|hendrycksTest-nutrition|5_2023-12-16T20-22-05.730511.parquet"]}, {"split": "2023_12_16T20_58_30.002770", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-12-16T20-58-30.002770.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-12-16T20-58-30.002770.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_12_16T20_22_05.730511", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-12-16T20-22-05.730511.parquet"]}, {"split": "2023_12_16T20_58_30.002770", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-12-16T20-58-30.002770.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-12-16T20-58-30.002770.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_12_16T20_22_05.730511", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-12-16T20-22-05.730511.parquet"]}, {"split": "2023_12_16T20_58_30.002770", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-12-16T20-58-30.002770.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-12-16T20-58-30.002770.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_12_16T20_22_05.730511", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T20-22-05.730511.parquet"]}, {"split": "2023_12_16T20_58_30.002770", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T20-58-30.002770.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T20-58-30.002770.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_12_16T20_22_05.730511", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-12-16T20-22-05.730511.parquet"]}, {"split": "2023_12_16T20_58_30.002770", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-12-16T20-58-30.002770.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-12-16T20-58-30.002770.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_12_16T20_22_05.730511", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T20-22-05.730511.parquet"]}, {"split": "2023_12_16T20_58_30.002770", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T20-58-30.002770.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T20-58-30.002770.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_12_16T20_22_05.730511", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T20-22-05.730511.parquet"]}, {"split": "2023_12_16T20_58_30.002770", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T20-58-30.002770.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T20-58-30.002770.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_12_16T20_22_05.730511", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-12-16T20-22-05.730511.parquet"]}, {"split": "2023_12_16T20_58_30.002770", "path": 
["**/details_harness|hendrycksTest-public_relations|5_2023-12-16T20-58-30.002770.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-12-16T20-58-30.002770.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_12_16T20_22_05.730511", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-12-16T20-22-05.730511.parquet"]}, {"split": "2023_12_16T20_58_30.002770", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-12-16T20-58-30.002770.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-12-16T20-58-30.002770.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_12_16T20_22_05.730511", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-12-16T20-22-05.730511.parquet"]}, {"split": "2023_12_16T20_58_30.002770", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-12-16T20-58-30.002770.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-12-16T20-58-30.002770.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_12_16T20_22_05.730511", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T20-22-05.730511.parquet"]}, {"split": "2023_12_16T20_58_30.002770", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T20-58-30.002770.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T20-58-30.002770.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_12_16T20_22_05.730511", "path": ["**/details_harness|hendrycksTest-virology|5_2023-12-16T20-22-05.730511.parquet"]}, {"split": "2023_12_16T20_58_30.002770", "path": ["**/details_harness|hendrycksTest-virology|5_2023-12-16T20-58-30.002770.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-12-16T20-58-30.002770.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_12_16T20_22_05.730511", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-12-16T20-22-05.730511.parquet"]}, {"split": "2023_12_16T20_58_30.002770", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-12-16T20-58-30.002770.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-12-16T20-58-30.002770.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_12_16T20_22_05.730511", "path": ["**/details_harness|truthfulqa:mc|0_2023-12-16T20-22-05.730511.parquet"]}, {"split": "2023_12_16T20_58_30.002770", "path": ["**/details_harness|truthfulqa:mc|0_2023-12-16T20-58-30.002770.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-12-16T20-58-30.002770.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_12_16T20_22_05.730511", "path": ["**/details_harness|winogrande|5_2023-12-16T20-22-05.730511.parquet"]}, {"split": "2023_12_16T20_58_30.002770", "path": ["**/details_harness|winogrande|5_2023-12-16T20-58-30.002770.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-12-16T20-58-30.002770.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_12_16T20_22_05.730511", "path": ["results_2023-12-16T20-22-05.730511.parquet"]}, {"split": "2023_12_16T20_58_30.002770", "path": 
["results_2023-12-16T20-58-30.002770.parquet"]}, {"split": "latest", "path": ["results_2023-12-16T20-58-30.002770.parquet"]}]}]}
2023-12-16T21:01:24+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of GreenNode/GreenNodeLM-7B-v4leo Dataset automatically created during the evaluation run of model GreenNode/GreenNodeLM-7B-v4leo on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-12-16T20:58:30.002770 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
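The card above says "you can for instance do the following", but the accompanying snippet was dropped when this text was flattened. Below is a minimal sketch of that load, assuming the details repository follows the usual open-llm-leaderboard naming convention (the repo id `open-llm-leaderboard/details_GreenNode__GreenNodeLM-7B-v4leo` is inferred, not spelled out in this record); the config name and the "latest" split are taken from the configs metadata listed earlier in this record.

```python
from datasets import load_dataset

# Assumed repository id, following the open-llm-leaderboard details naming convention;
# this record only gives the model repo URL, not the details dataset id.
details_repo = "open-llm-leaderboard/details_GreenNode__GreenNodeLM-7B-v4leo"

# "harness_winogrande_5" is one of the config names listed in the metadata above;
# the "latest" split always points at the most recent evaluation run.
data = load_dataset(details_repo, "harness_winogrande_5", split="latest")
print(data)
```

Any other config name from the metadata (e.g. `harness_gsm8k_5` or one of the per-subject `harness_hendrycksTest_*_5` configs) can be substituted, and the timestamped splits such as `2023_12_16T20_22_05.730511` select an individual run instead of the latest one.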
[ "# Dataset Card for Evaluation run of GreenNode/GreenNodeLM-7B-v4leo\n\n\n\nDataset automatically created during the evaluation run of model GreenNode/GreenNodeLM-7B-v4leo on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-12-16T20:58:30.002770(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of GreenNode/GreenNodeLM-7B-v4leo\n\n\n\nDataset automatically created during the evaluation run of model GreenNode/GreenNodeLM-7B-v4leo on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-12-16T20:58:30.002770(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 189, 67, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of GreenNode/GreenNodeLM-7B-v4leo\n\n\n\nDataset automatically created during the evaluation run of model GreenNode/GreenNodeLM-7B-v4leo on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-12-16T20:58:30.002770(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]## Dataset Card Authors [optional]## Dataset Card Contact" ]
ed119b5ad5d1bba7baa4aa1fcd9f7dfaa2e15d18
# Dataset Card for Evaluation run of abhinand/tamil-llama-13b-instruct-v0.1 <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [abhinand/tamil-llama-13b-instruct-v0.1](https://huggingface.co/abhinand/tamil-llama-13b-instruct-v0.1) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_abhinand__tamil-llama-13b-instruct-v0.1", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-12-16T20:43:26.117293](https://huggingface.co/datasets/open-llm-leaderboard/details_abhinand__tamil-llama-13b-instruct-v0.1/blob/main/results_2023-12-16T20-43-26.117293.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.5023281479237812, "acc_stderr": 0.03423071191792384, "acc_norm": 0.5093297230792332, "acc_norm_stderr": 0.03505040081506114, "mc1": 0.2937576499388005, "mc1_stderr": 0.015945068581236618, "mc2": 0.41223396750169633, "mc2_stderr": 0.014550748384881294 }, "harness|arc:challenge|25": { "acc": 0.5042662116040956, "acc_stderr": 0.014610858923956959, "acc_norm": 0.5452218430034129, "acc_norm_stderr": 0.014551507060836357 }, "harness|hellaswag|10": { "acc": 0.5853415654252141, "acc_stderr": 0.004916561213591284, "acc_norm": 0.7934674367655845, "acc_norm_stderr": 0.004039897423689424 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.33, "acc_stderr": 0.04725815626252606, "acc_norm": 0.33, "acc_norm_stderr": 0.04725815626252606 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.4222222222222222, "acc_stderr": 0.04266763404099582, "acc_norm": 0.4222222222222222, "acc_norm_stderr": 0.04266763404099582 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.5263157894736842, "acc_stderr": 0.04063302731486671, "acc_norm": 0.5263157894736842, "acc_norm_stderr": 0.04063302731486671 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.55, "acc_stderr": 0.049999999999999996, "acc_norm": 0.55, "acc_norm_stderr": 0.049999999999999996 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.5320754716981132, "acc_stderr": 0.030709486992556552, "acc_norm": 0.5320754716981132, "acc_norm_stderr": 0.030709486992556552 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.5, "acc_stderr": 0.04181210050035455, "acc_norm": 0.5, "acc_norm_stderr": 0.04181210050035455 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.34, "acc_stderr": 0.04760952285695236, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695236 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.41, "acc_stderr": 0.04943110704237102, "acc_norm": 0.41, "acc_norm_stderr": 0.04943110704237102 }, 
"harness|hendrycksTest-college_mathematics|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.4393063583815029, "acc_stderr": 0.03784271932887467, "acc_norm": 0.4393063583815029, "acc_norm_stderr": 0.03784271932887467 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.23529411764705882, "acc_stderr": 0.04220773659171452, "acc_norm": 0.23529411764705882, "acc_norm_stderr": 0.04220773659171452 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.66, "acc_stderr": 0.04760952285695237, "acc_norm": 0.66, "acc_norm_stderr": 0.04760952285695237 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.39574468085106385, "acc_stderr": 0.031967586978353627, "acc_norm": 0.39574468085106385, "acc_norm_stderr": 0.031967586978353627 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.2894736842105263, "acc_stderr": 0.04266339443159394, "acc_norm": 0.2894736842105263, "acc_norm_stderr": 0.04266339443159394 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.45517241379310347, "acc_stderr": 0.04149886942192117, "acc_norm": 0.45517241379310347, "acc_norm_stderr": 0.04149886942192117 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.35714285714285715, "acc_stderr": 0.024677862841332783, "acc_norm": 0.35714285714285715, "acc_norm_stderr": 0.024677862841332783 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.30952380952380953, "acc_stderr": 0.04134913018303316, "acc_norm": 0.30952380952380953, "acc_norm_stderr": 0.04134913018303316 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.39, "acc_stderr": 0.04902071300001975, "acc_norm": 0.39, "acc_norm_stderr": 0.04902071300001975 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.5612903225806452, "acc_stderr": 0.02822949732031721, "acc_norm": 0.5612903225806452, "acc_norm_stderr": 0.02822949732031721 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.32019704433497537, "acc_stderr": 0.032826493853041504, "acc_norm": 0.32019704433497537, "acc_norm_stderr": 0.032826493853041504 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.53, "acc_stderr": 0.05016135580465919, "acc_norm": 0.53, "acc_norm_stderr": 0.05016135580465919 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.6181818181818182, "acc_stderr": 0.037937131711656344, "acc_norm": 0.6181818181818182, "acc_norm_stderr": 0.037937131711656344 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.6565656565656566, "acc_stderr": 0.03383201223244441, "acc_norm": 0.6565656565656566, "acc_norm_stderr": 0.03383201223244441 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.7409326424870466, "acc_stderr": 0.03161877917935413, "acc_norm": 0.7409326424870466, "acc_norm_stderr": 0.03161877917935413 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.45384615384615384, "acc_stderr": 0.025242770987126177, "acc_norm": 0.45384615384615384, "acc_norm_stderr": 0.025242770987126177 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.25925925925925924, "acc_stderr": 0.02671924078371216, "acc_norm": 0.25925925925925924, "acc_norm_stderr": 0.02671924078371216 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.5042016806722689, "acc_stderr": 0.0324773433444811, "acc_norm": 0.5042016806722689, "acc_norm_stderr": 0.0324773433444811 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.31125827814569534, "acc_stderr": 
0.03780445850526733, "acc_norm": 0.31125827814569534, "acc_norm_stderr": 0.03780445850526733 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.7045871559633028, "acc_stderr": 0.019560619182976, "acc_norm": 0.7045871559633028, "acc_norm_stderr": 0.019560619182976 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.41203703703703703, "acc_stderr": 0.03356787758160835, "acc_norm": 0.41203703703703703, "acc_norm_stderr": 0.03356787758160835 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.6470588235294118, "acc_stderr": 0.03354092437591519, "acc_norm": 0.6470588235294118, "acc_norm_stderr": 0.03354092437591519 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.6244725738396625, "acc_stderr": 0.03152256243091156, "acc_norm": 0.6244725738396625, "acc_norm_stderr": 0.03152256243091156 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.600896860986547, "acc_stderr": 0.03286745312567961, "acc_norm": 0.600896860986547, "acc_norm_stderr": 0.03286745312567961 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.5419847328244275, "acc_stderr": 0.04369802690578756, "acc_norm": 0.5419847328244275, "acc_norm_stderr": 0.04369802690578756 }, "harness|hendrycksTest-international_law|5": { "acc": 0.6776859504132231, "acc_stderr": 0.042664163633521685, "acc_norm": 0.6776859504132231, "acc_norm_stderr": 0.042664163633521685 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.5370370370370371, "acc_stderr": 0.04820403072760628, "acc_norm": 0.5370370370370371, "acc_norm_stderr": 0.04820403072760628 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.6073619631901841, "acc_stderr": 0.03836740907831029, "acc_norm": 0.6073619631901841, "acc_norm_stderr": 0.03836740907831029 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.35714285714285715, "acc_stderr": 0.04547960999764376, "acc_norm": 0.35714285714285715, "acc_norm_stderr": 0.04547960999764376 }, "harness|hendrycksTest-management|5": { "acc": 0.6796116504854369, "acc_stderr": 0.04620284082280042, "acc_norm": 0.6796116504854369, "acc_norm_stderr": 0.04620284082280042 }, "harness|hendrycksTest-marketing|5": { "acc": 0.7777777777777778, "acc_stderr": 0.027236013946196687, "acc_norm": 0.7777777777777778, "acc_norm_stderr": 0.027236013946196687 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.54, "acc_stderr": 0.05009082659620332, "acc_norm": 0.54, "acc_norm_stderr": 0.05009082659620332 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.7139208173690932, "acc_stderr": 0.016160871405127543, "acc_norm": 0.7139208173690932, "acc_norm_stderr": 0.016160871405127543 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.5635838150289018, "acc_stderr": 0.026700545424943677, "acc_norm": 0.5635838150289018, "acc_norm_stderr": 0.026700545424943677 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.2581005586592179, "acc_stderr": 0.014635185616527817, "acc_norm": 0.2581005586592179, "acc_norm_stderr": 0.014635185616527817 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.5294117647058824, "acc_stderr": 0.028580341065138293, "acc_norm": 0.5294117647058824, "acc_norm_stderr": 0.028580341065138293 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.5852090032154341, "acc_stderr": 0.02798268045975957, "acc_norm": 0.5852090032154341, "acc_norm_stderr": 0.02798268045975957 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.5246913580246914, "acc_stderr": 0.02778680093142745, "acc_norm": 0.5246913580246914, "acc_norm_stderr": 0.02778680093142745 }, 
"harness|hendrycksTest-professional_accounting|5": { "acc": 0.3829787234042553, "acc_stderr": 0.02899908090480617, "acc_norm": 0.3829787234042553, "acc_norm_stderr": 0.02899908090480617 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.37027379400260757, "acc_stderr": 0.012332930781256728, "acc_norm": 0.37027379400260757, "acc_norm_stderr": 0.012332930781256728 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.44485294117647056, "acc_stderr": 0.03018753206032939, "acc_norm": 0.44485294117647056, "acc_norm_stderr": 0.03018753206032939 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.49836601307189543, "acc_stderr": 0.020227726838150124, "acc_norm": 0.49836601307189543, "acc_norm_stderr": 0.020227726838150124 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.5636363636363636, "acc_stderr": 0.04750185058907296, "acc_norm": 0.5636363636363636, "acc_norm_stderr": 0.04750185058907296 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.5877551020408164, "acc_stderr": 0.03151236044674269, "acc_norm": 0.5877551020408164, "acc_norm_stderr": 0.03151236044674269 }, "harness|hendrycksTest-sociology|5": { "acc": 0.6616915422885572, "acc_stderr": 0.033455630703391935, "acc_norm": 0.6616915422885572, "acc_norm_stderr": 0.033455630703391935 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.74, "acc_stderr": 0.0440844002276808, "acc_norm": 0.74, "acc_norm_stderr": 0.0440844002276808 }, "harness|hendrycksTest-virology|5": { "acc": 0.46987951807228917, "acc_stderr": 0.03885425420866766, "acc_norm": 0.46987951807228917, "acc_norm_stderr": 0.03885425420866766 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.7017543859649122, "acc_stderr": 0.03508771929824564, "acc_norm": 0.7017543859649122, "acc_norm_stderr": 0.03508771929824564 }, "harness|truthfulqa:mc|0": { "mc1": 0.2937576499388005, "mc1_stderr": 0.015945068581236618, "mc2": 0.41223396750169633, "mc2_stderr": 0.014550748384881294 }, "harness|winogrande|5": { "acc": 0.7655880031570639, "acc_stderr": 0.011906130106237985 }, "harness|gsm8k|5": { "acc": 0.07505686125852919, "acc_stderr": 0.007257633145486643 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. 
news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
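As a usage note, the aggregated scores shown in the "Latest results" section can be loaded without downloading the per-sample details. The snippet below is a minimal sketch that assumes the `results` configuration and the `latest` split declared in this dataset's configuration metadata; adjust the configuration name if you want a single task's details instead.

```python
from datasets import load_dataset

# Minimal sketch: load only the aggregated metrics for the most recent run.
# The "results" configuration and "latest" split follow the names declared in
# the dataset metadata above; this is illustrative, not the only way to query.
results = load_dataset(
    "open-llm-leaderboard/details_abhinand__tamil-llama-13b-instruct-v0.1",
    "results",
    split="latest",
)

# Each row holds the serialized metrics for one evaluation run.
print(results[0])
```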
open-llm-leaderboard/details_abhinand__tamil-llama-13b-instruct-v0.1
[ "region:us" ]
2023-12-16T20:46:25+00:00
{"pretty_name": "Evaluation run of abhinand/tamil-llama-13b-instruct-v0.1", "dataset_summary": "Dataset automatically created during the evaluation run of model [abhinand/tamil-llama-13b-instruct-v0.1](https://huggingface.co/abhinand/tamil-llama-13b-instruct-v0.1) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_abhinand__tamil-llama-13b-instruct-v0.1\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-12-16T20:43:26.117293](https://huggingface.co/datasets/open-llm-leaderboard/details_abhinand__tamil-llama-13b-instruct-v0.1/blob/main/results_2023-12-16T20-43-26.117293.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.5023281479237812,\n \"acc_stderr\": 0.03423071191792384,\n \"acc_norm\": 0.5093297230792332,\n \"acc_norm_stderr\": 0.03505040081506114,\n \"mc1\": 0.2937576499388005,\n \"mc1_stderr\": 0.015945068581236618,\n \"mc2\": 0.41223396750169633,\n \"mc2_stderr\": 0.014550748384881294\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.5042662116040956,\n \"acc_stderr\": 0.014610858923956959,\n \"acc_norm\": 0.5452218430034129,\n \"acc_norm_stderr\": 0.014551507060836357\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.5853415654252141,\n \"acc_stderr\": 0.004916561213591284,\n \"acc_norm\": 0.7934674367655845,\n \"acc_norm_stderr\": 0.004039897423689424\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.33,\n \"acc_stderr\": 0.04725815626252606,\n \"acc_norm\": 0.33,\n \"acc_norm_stderr\": 0.04725815626252606\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.4222222222222222,\n \"acc_stderr\": 0.04266763404099582,\n \"acc_norm\": 0.4222222222222222,\n \"acc_norm_stderr\": 0.04266763404099582\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.5263157894736842,\n \"acc_stderr\": 0.04063302731486671,\n \"acc_norm\": 0.5263157894736842,\n \"acc_norm_stderr\": 0.04063302731486671\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.55,\n \"acc_stderr\": 0.049999999999999996,\n \"acc_norm\": 0.55,\n \"acc_norm_stderr\": 0.049999999999999996\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.5320754716981132,\n \"acc_stderr\": 0.030709486992556552,\n \"acc_norm\": 0.5320754716981132,\n \"acc_norm_stderr\": 0.030709486992556552\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.5,\n \"acc_stderr\": 0.04181210050035455,\n \"acc_norm\": 0.5,\n \"acc_norm_stderr\": 0.04181210050035455\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.34,\n \"acc_stderr\": 
0.04760952285695236,\n \"acc_norm\": 0.34,\n \"acc_norm_stderr\": 0.04760952285695236\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.41,\n \"acc_stderr\": 0.04943110704237102,\n \"acc_norm\": 0.41,\n \"acc_norm_stderr\": 0.04943110704237102\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.31,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.31,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.4393063583815029,\n \"acc_stderr\": 0.03784271932887467,\n \"acc_norm\": 0.4393063583815029,\n \"acc_norm_stderr\": 0.03784271932887467\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.23529411764705882,\n \"acc_stderr\": 0.04220773659171452,\n \"acc_norm\": 0.23529411764705882,\n \"acc_norm_stderr\": 0.04220773659171452\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.66,\n \"acc_stderr\": 0.04760952285695237,\n \"acc_norm\": 0.66,\n \"acc_norm_stderr\": 0.04760952285695237\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.39574468085106385,\n \"acc_stderr\": 0.031967586978353627,\n \"acc_norm\": 0.39574468085106385,\n \"acc_norm_stderr\": 0.031967586978353627\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.2894736842105263,\n \"acc_stderr\": 0.04266339443159394,\n \"acc_norm\": 0.2894736842105263,\n \"acc_norm_stderr\": 0.04266339443159394\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.45517241379310347,\n \"acc_stderr\": 0.04149886942192117,\n \"acc_norm\": 0.45517241379310347,\n \"acc_norm_stderr\": 0.04149886942192117\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.35714285714285715,\n \"acc_stderr\": 0.024677862841332783,\n \"acc_norm\": 0.35714285714285715,\n \"acc_norm_stderr\": 0.024677862841332783\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.30952380952380953,\n \"acc_stderr\": 0.04134913018303316,\n \"acc_norm\": 0.30952380952380953,\n \"acc_norm_stderr\": 0.04134913018303316\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.39,\n \"acc_stderr\": 0.04902071300001975,\n \"acc_norm\": 0.39,\n \"acc_norm_stderr\": 0.04902071300001975\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.5612903225806452,\n \"acc_stderr\": 0.02822949732031721,\n \"acc_norm\": 0.5612903225806452,\n \"acc_norm_stderr\": 0.02822949732031721\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.32019704433497537,\n \"acc_stderr\": 0.032826493853041504,\n \"acc_norm\": 0.32019704433497537,\n \"acc_norm_stderr\": 0.032826493853041504\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.53,\n \"acc_stderr\": 0.05016135580465919,\n \"acc_norm\": 0.53,\n \"acc_norm_stderr\": 0.05016135580465919\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.6181818181818182,\n \"acc_stderr\": 0.037937131711656344,\n \"acc_norm\": 0.6181818181818182,\n \"acc_norm_stderr\": 0.037937131711656344\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.6565656565656566,\n \"acc_stderr\": 0.03383201223244441,\n \"acc_norm\": 0.6565656565656566,\n \"acc_norm_stderr\": 0.03383201223244441\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.7409326424870466,\n \"acc_stderr\": 0.03161877917935413,\n \"acc_norm\": 0.7409326424870466,\n \"acc_norm_stderr\": 0.03161877917935413\n },\n 
\"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.45384615384615384,\n \"acc_stderr\": 0.025242770987126177,\n \"acc_norm\": 0.45384615384615384,\n \"acc_norm_stderr\": 0.025242770987126177\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.25925925925925924,\n \"acc_stderr\": 0.02671924078371216,\n \"acc_norm\": 0.25925925925925924,\n \"acc_norm_stderr\": 0.02671924078371216\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.5042016806722689,\n \"acc_stderr\": 0.0324773433444811,\n \"acc_norm\": 0.5042016806722689,\n \"acc_norm_stderr\": 0.0324773433444811\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.31125827814569534,\n \"acc_stderr\": 0.03780445850526733,\n \"acc_norm\": 0.31125827814569534,\n \"acc_norm_stderr\": 0.03780445850526733\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.7045871559633028,\n \"acc_stderr\": 0.019560619182976,\n \"acc_norm\": 0.7045871559633028,\n \"acc_norm_stderr\": 0.019560619182976\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.41203703703703703,\n \"acc_stderr\": 0.03356787758160835,\n \"acc_norm\": 0.41203703703703703,\n \"acc_norm_stderr\": 0.03356787758160835\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.6470588235294118,\n \"acc_stderr\": 0.03354092437591519,\n \"acc_norm\": 0.6470588235294118,\n \"acc_norm_stderr\": 0.03354092437591519\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.6244725738396625,\n \"acc_stderr\": 0.03152256243091156,\n \"acc_norm\": 0.6244725738396625,\n \"acc_norm_stderr\": 0.03152256243091156\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.600896860986547,\n \"acc_stderr\": 0.03286745312567961,\n \"acc_norm\": 0.600896860986547,\n \"acc_norm_stderr\": 0.03286745312567961\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.5419847328244275,\n \"acc_stderr\": 0.04369802690578756,\n \"acc_norm\": 0.5419847328244275,\n \"acc_norm_stderr\": 0.04369802690578756\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.6776859504132231,\n \"acc_stderr\": 0.042664163633521685,\n \"acc_norm\": 0.6776859504132231,\n \"acc_norm_stderr\": 0.042664163633521685\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.5370370370370371,\n \"acc_stderr\": 0.04820403072760628,\n \"acc_norm\": 0.5370370370370371,\n \"acc_norm_stderr\": 0.04820403072760628\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.6073619631901841,\n \"acc_stderr\": 0.03836740907831029,\n \"acc_norm\": 0.6073619631901841,\n \"acc_norm_stderr\": 0.03836740907831029\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.35714285714285715,\n \"acc_stderr\": 0.04547960999764376,\n \"acc_norm\": 0.35714285714285715,\n \"acc_norm_stderr\": 0.04547960999764376\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.6796116504854369,\n \"acc_stderr\": 0.04620284082280042,\n \"acc_norm\": 0.6796116504854369,\n \"acc_norm_stderr\": 0.04620284082280042\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.7777777777777778,\n \"acc_stderr\": 0.027236013946196687,\n \"acc_norm\": 0.7777777777777778,\n \"acc_norm_stderr\": 0.027236013946196687\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.54,\n \"acc_stderr\": 0.05009082659620332,\n \"acc_norm\": 0.54,\n \"acc_norm_stderr\": 0.05009082659620332\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 
0.7139208173690932,\n \"acc_stderr\": 0.016160871405127543,\n \"acc_norm\": 0.7139208173690932,\n \"acc_norm_stderr\": 0.016160871405127543\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.5635838150289018,\n \"acc_stderr\": 0.026700545424943677,\n \"acc_norm\": 0.5635838150289018,\n \"acc_norm_stderr\": 0.026700545424943677\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.2581005586592179,\n \"acc_stderr\": 0.014635185616527817,\n \"acc_norm\": 0.2581005586592179,\n \"acc_norm_stderr\": 0.014635185616527817\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.5294117647058824,\n \"acc_stderr\": 0.028580341065138293,\n \"acc_norm\": 0.5294117647058824,\n \"acc_norm_stderr\": 0.028580341065138293\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.5852090032154341,\n \"acc_stderr\": 0.02798268045975957,\n \"acc_norm\": 0.5852090032154341,\n \"acc_norm_stderr\": 0.02798268045975957\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.5246913580246914,\n \"acc_stderr\": 0.02778680093142745,\n \"acc_norm\": 0.5246913580246914,\n \"acc_norm_stderr\": 0.02778680093142745\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.3829787234042553,\n \"acc_stderr\": 0.02899908090480617,\n \"acc_norm\": 0.3829787234042553,\n \"acc_norm_stderr\": 0.02899908090480617\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.37027379400260757,\n \"acc_stderr\": 0.012332930781256728,\n \"acc_norm\": 0.37027379400260757,\n \"acc_norm_stderr\": 0.012332930781256728\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.44485294117647056,\n \"acc_stderr\": 0.03018753206032939,\n \"acc_norm\": 0.44485294117647056,\n \"acc_norm_stderr\": 0.03018753206032939\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.49836601307189543,\n \"acc_stderr\": 0.020227726838150124,\n \"acc_norm\": 0.49836601307189543,\n \"acc_norm_stderr\": 0.020227726838150124\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.5636363636363636,\n \"acc_stderr\": 0.04750185058907296,\n \"acc_norm\": 0.5636363636363636,\n \"acc_norm_stderr\": 0.04750185058907296\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.5877551020408164,\n \"acc_stderr\": 0.03151236044674269,\n \"acc_norm\": 0.5877551020408164,\n \"acc_norm_stderr\": 0.03151236044674269\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.6616915422885572,\n \"acc_stderr\": 0.033455630703391935,\n \"acc_norm\": 0.6616915422885572,\n \"acc_norm_stderr\": 0.033455630703391935\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.74,\n \"acc_stderr\": 0.0440844002276808,\n \"acc_norm\": 0.74,\n \"acc_norm_stderr\": 0.0440844002276808\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.46987951807228917,\n \"acc_stderr\": 0.03885425420866766,\n \"acc_norm\": 0.46987951807228917,\n \"acc_norm_stderr\": 0.03885425420866766\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.7017543859649122,\n \"acc_stderr\": 0.03508771929824564,\n \"acc_norm\": 0.7017543859649122,\n \"acc_norm_stderr\": 0.03508771929824564\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.2937576499388005,\n \"mc1_stderr\": 0.015945068581236618,\n \"mc2\": 0.41223396750169633,\n \"mc2_stderr\": 0.014550748384881294\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7655880031570639,\n \"acc_stderr\": 0.011906130106237985\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.07505686125852919,\n \"acc_stderr\": 
0.007257633145486643\n }\n}\n```", "repo_url": "https://huggingface.co/abhinand/tamil-llama-13b-instruct-v0.1", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_12_16T20_43_26.117293", "path": ["**/details_harness|arc:challenge|25_2023-12-16T20-43-26.117293.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-12-16T20-43-26.117293.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_12_16T20_43_26.117293", "path": ["**/details_harness|gsm8k|5_2023-12-16T20-43-26.117293.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-12-16T20-43-26.117293.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_12_16T20_43_26.117293", "path": ["**/details_harness|hellaswag|10_2023-12-16T20-43-26.117293.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-12-16T20-43-26.117293.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_12_16T20_43_26.117293", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T20-43-26.117293.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-12-16T20-43-26.117293.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-12-16T20-43-26.117293.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T20-43-26.117293.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T20-43-26.117293.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-12-16T20-43-26.117293.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T20-43-26.117293.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T20-43-26.117293.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T20-43-26.117293.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T20-43-26.117293.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-12-16T20-43-26.117293.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-12-16T20-43-26.117293.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T20-43-26.117293.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-12-16T20-43-26.117293.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T20-43-26.117293.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T20-43-26.117293.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T20-43-26.117293.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-12-16T20-43-26.117293.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T20-43-26.117293.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T20-43-26.117293.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T20-43-26.117293.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T20-43-26.117293.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T20-43-26.117293.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T20-43-26.117293.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T20-43-26.117293.parquet", 
"**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T20-43-26.117293.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T20-43-26.117293.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T20-43-26.117293.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T20-43-26.117293.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T20-43-26.117293.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T20-43-26.117293.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T20-43-26.117293.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-12-16T20-43-26.117293.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T20-43-26.117293.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-12-16T20-43-26.117293.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T20-43-26.117293.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T20-43-26.117293.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T20-43-26.117293.parquet", "**/details_harness|hendrycksTest-management|5_2023-12-16T20-43-26.117293.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-12-16T20-43-26.117293.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T20-43-26.117293.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T20-43-26.117293.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T20-43-26.117293.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T20-43-26.117293.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-12-16T20-43-26.117293.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-12-16T20-43-26.117293.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-12-16T20-43-26.117293.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T20-43-26.117293.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-12-16T20-43-26.117293.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T20-43-26.117293.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T20-43-26.117293.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-12-16T20-43-26.117293.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-12-16T20-43-26.117293.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-12-16T20-43-26.117293.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T20-43-26.117293.parquet", "**/details_harness|hendrycksTest-virology|5_2023-12-16T20-43-26.117293.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-12-16T20-43-26.117293.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T20-43-26.117293.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-12-16T20-43-26.117293.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-12-16T20-43-26.117293.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T20-43-26.117293.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T20-43-26.117293.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-12-16T20-43-26.117293.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T20-43-26.117293.parquet", 
"**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T20-43-26.117293.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T20-43-26.117293.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T20-43-26.117293.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-12-16T20-43-26.117293.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-12-16T20-43-26.117293.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T20-43-26.117293.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-12-16T20-43-26.117293.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T20-43-26.117293.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T20-43-26.117293.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T20-43-26.117293.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-12-16T20-43-26.117293.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T20-43-26.117293.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T20-43-26.117293.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T20-43-26.117293.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T20-43-26.117293.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T20-43-26.117293.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T20-43-26.117293.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T20-43-26.117293.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T20-43-26.117293.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T20-43-26.117293.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T20-43-26.117293.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T20-43-26.117293.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T20-43-26.117293.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T20-43-26.117293.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T20-43-26.117293.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-12-16T20-43-26.117293.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T20-43-26.117293.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-12-16T20-43-26.117293.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T20-43-26.117293.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T20-43-26.117293.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T20-43-26.117293.parquet", "**/details_harness|hendrycksTest-management|5_2023-12-16T20-43-26.117293.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-12-16T20-43-26.117293.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T20-43-26.117293.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T20-43-26.117293.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T20-43-26.117293.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T20-43-26.117293.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-12-16T20-43-26.117293.parquet", 
"**/details_harness|hendrycksTest-philosophy|5_2023-12-16T20-43-26.117293.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-12-16T20-43-26.117293.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T20-43-26.117293.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-12-16T20-43-26.117293.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T20-43-26.117293.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T20-43-26.117293.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-12-16T20-43-26.117293.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-12-16T20-43-26.117293.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-12-16T20-43-26.117293.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T20-43-26.117293.parquet", "**/details_harness|hendrycksTest-virology|5_2023-12-16T20-43-26.117293.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-12-16T20-43-26.117293.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_12_16T20_43_26.117293", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T20-43-26.117293.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T20-43-26.117293.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_12_16T20_43_26.117293", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-12-16T20-43-26.117293.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-12-16T20-43-26.117293.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_12_16T20_43_26.117293", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-12-16T20-43-26.117293.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-12-16T20-43-26.117293.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_12_16T20_43_26.117293", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T20-43-26.117293.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T20-43-26.117293.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_12_16T20_43_26.117293", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T20-43-26.117293.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T20-43-26.117293.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_12_16T20_43_26.117293", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-12-16T20-43-26.117293.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-12-16T20-43-26.117293.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_12_16T20_43_26.117293", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T20-43-26.117293.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T20-43-26.117293.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_12_16T20_43_26.117293", "path": 
["**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T20-43-26.117293.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T20-43-26.117293.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_12_16T20_43_26.117293", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T20-43-26.117293.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T20-43-26.117293.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_12_16T20_43_26.117293", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T20-43-26.117293.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T20-43-26.117293.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_12_16T20_43_26.117293", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-12-16T20-43-26.117293.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-12-16T20-43-26.117293.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_12_16T20_43_26.117293", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-12-16T20-43-26.117293.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-12-16T20-43-26.117293.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_12_16T20_43_26.117293", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T20-43-26.117293.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T20-43-26.117293.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_12_16T20_43_26.117293", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-12-16T20-43-26.117293.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-12-16T20-43-26.117293.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_12_16T20_43_26.117293", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T20-43-26.117293.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T20-43-26.117293.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_12_16T20_43_26.117293", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T20-43-26.117293.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T20-43-26.117293.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_12_16T20_43_26.117293", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T20-43-26.117293.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T20-43-26.117293.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_12_16T20_43_26.117293", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-12-16T20-43-26.117293.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-global_facts|5_2023-12-16T20-43-26.117293.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_12_16T20_43_26.117293", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T20-43-26.117293.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T20-43-26.117293.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_12_16T20_43_26.117293", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T20-43-26.117293.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T20-43-26.117293.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_12_16T20_43_26.117293", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T20-43-26.117293.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T20-43-26.117293.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_12_16T20_43_26.117293", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T20-43-26.117293.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T20-43-26.117293.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_12_16T20_43_26.117293", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T20-43-26.117293.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T20-43-26.117293.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_12_16T20_43_26.117293", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T20-43-26.117293.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T20-43-26.117293.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_12_16T20_43_26.117293", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T20-43-26.117293.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T20-43-26.117293.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_12_16T20_43_26.117293", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T20-43-26.117293.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T20-43-26.117293.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_12_16T20_43_26.117293", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T20-43-26.117293.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T20-43-26.117293.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_12_16T20_43_26.117293", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T20-43-26.117293.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T20-43-26.117293.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_12_16T20_43_26.117293", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T20-43-26.117293.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T20-43-26.117293.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_12_16T20_43_26.117293", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T20-43-26.117293.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T20-43-26.117293.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_12_16T20_43_26.117293", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T20-43-26.117293.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T20-43-26.117293.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_12_16T20_43_26.117293", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T20-43-26.117293.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T20-43-26.117293.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_12_16T20_43_26.117293", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-12-16T20-43-26.117293.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-12-16T20-43-26.117293.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_12_16T20_43_26.117293", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T20-43-26.117293.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T20-43-26.117293.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_12_16T20_43_26.117293", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-12-16T20-43-26.117293.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-12-16T20-43-26.117293.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_12_16T20_43_26.117293", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T20-43-26.117293.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T20-43-26.117293.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_12_16T20_43_26.117293", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T20-43-26.117293.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T20-43-26.117293.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_12_16T20_43_26.117293", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T20-43-26.117293.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T20-43-26.117293.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_12_16T20_43_26.117293", "path": ["**/details_harness|hendrycksTest-management|5_2023-12-16T20-43-26.117293.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-12-16T20-43-26.117293.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_12_16T20_43_26.117293", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-12-16T20-43-26.117293.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-12-16T20-43-26.117293.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_12_16T20_43_26.117293", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T20-43-26.117293.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T20-43-26.117293.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_12_16T20_43_26.117293", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T20-43-26.117293.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T20-43-26.117293.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_12_16T20_43_26.117293", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T20-43-26.117293.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T20-43-26.117293.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_12_16T20_43_26.117293", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T20-43-26.117293.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T20-43-26.117293.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_12_16T20_43_26.117293", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-12-16T20-43-26.117293.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-12-16T20-43-26.117293.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_12_16T20_43_26.117293", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-12-16T20-43-26.117293.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-12-16T20-43-26.117293.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_12_16T20_43_26.117293", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-12-16T20-43-26.117293.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-12-16T20-43-26.117293.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_12_16T20_43_26.117293", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T20-43-26.117293.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T20-43-26.117293.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_12_16T20_43_26.117293", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2023-12-16T20-43-26.117293.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-12-16T20-43-26.117293.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_12_16T20_43_26.117293", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T20-43-26.117293.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T20-43-26.117293.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_12_16T20_43_26.117293", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T20-43-26.117293.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T20-43-26.117293.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_12_16T20_43_26.117293", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-12-16T20-43-26.117293.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-12-16T20-43-26.117293.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_12_16T20_43_26.117293", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-12-16T20-43-26.117293.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-12-16T20-43-26.117293.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_12_16T20_43_26.117293", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-12-16T20-43-26.117293.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-12-16T20-43-26.117293.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_12_16T20_43_26.117293", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T20-43-26.117293.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T20-43-26.117293.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_12_16T20_43_26.117293", "path": ["**/details_harness|hendrycksTest-virology|5_2023-12-16T20-43-26.117293.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-12-16T20-43-26.117293.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_12_16T20_43_26.117293", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-12-16T20-43-26.117293.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-12-16T20-43-26.117293.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_12_16T20_43_26.117293", "path": ["**/details_harness|truthfulqa:mc|0_2023-12-16T20-43-26.117293.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-12-16T20-43-26.117293.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_12_16T20_43_26.117293", "path": ["**/details_harness|winogrande|5_2023-12-16T20-43-26.117293.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-12-16T20-43-26.117293.parquet"]}]}, {"config_name": "results", "data_files": [{"split": 
"2023_12_16T20_43_26.117293", "path": ["results_2023-12-16T20-43-26.117293.parquet"]}, {"split": "latest", "path": ["results_2023-12-16T20-43-26.117293.parquet"]}]}]}
2023-12-16T20:47:09+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of abhinand/tamil-llama-13b-instruct-v0.1 Dataset automatically created during the evaluation run of model abhinand/tamil-llama-13b-instruct-v0.1 on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-12-16T20:43:26.117293 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
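The loading snippet referenced just above ("you can for instance do the following:") was stripped when this card text was flattened; below is a minimal sketch of the usual pattern, assuming the repository id follows the leaderboard's "details_<org>__<model>" naming convention for this model and that a "harness_winogrande_5" config exists for this run.

```python
from datasets import load_dataset

# Assumed repo id and config name, following the open-llm-leaderboard
# "details_<org>__<model>" naming pattern used elsewhere in this document.
data = load_dataset(
    "open-llm-leaderboard/details_abhinand__tamil-llama-13b-instruct-v0.1",
    "harness_winogrande_5",
    split="train",
)
```

As the card text notes, the "latest" split always points at the most recent evaluation, while the timestamped split pins a specific run.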
[ "# Dataset Card for Evaluation run of abhinand/tamil-llama-13b-instruct-v0.1\n\n\n\nDataset automatically created during the evaluation run of model abhinand/tamil-llama-13b-instruct-v0.1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-12-16T20:43:26.117293(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of abhinand/tamil-llama-13b-instruct-v0.1\n\n\n\nDataset automatically created during the evaluation run of model abhinand/tamil-llama-13b-instruct-v0.1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-12-16T20:43:26.117293(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 193, 67, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of abhinand/tamil-llama-13b-instruct-v0.1\n\n\n\nDataset automatically created during the evaluation run of model abhinand/tamil-llama-13b-instruct-v0.1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-12-16T20:43:26.117293(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]## Dataset Card Authors [optional]" ]
13d371e71d095c62e822414d6a0627238bca73ec
# Dataset Card for Evaluation run of GreenNode/GreenNodeLM-v3olet-7B <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [GreenNode/GreenNodeLM-v3olet-7B](https://huggingface.co/GreenNode/GreenNodeLM-v3olet-7B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_GreenNode__GreenNodeLM-v3olet-7B", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-12-16T20:49:02.259410](https://huggingface.co/datasets/open-llm-leaderboard/details_GreenNode__GreenNodeLM-v3olet-7B/blob/main/results_2023-12-16T20-49-02.259410.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.657982142300908, "acc_stderr": 0.03197829574357859, "acc_norm": 0.6577587599136167, "acc_norm_stderr": 0.032639601635152275, "mc1": 0.554467564259486, "mc1_stderr": 0.017399335280140343, "mc2": 0.695178465897982, "mc2_stderr": 0.015007650690745592 }, "harness|arc:challenge|25": { "acc": 0.6919795221843004, "acc_stderr": 0.013491429517292038, "acc_norm": 0.7226962457337884, "acc_norm_stderr": 0.013082095839059376 }, "harness|hellaswag|10": { "acc": 0.7102170882294364, "acc_stderr": 0.004527343651130799, "acc_norm": 0.8824935271858195, "acc_norm_stderr": 0.0032136470410029463 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.34, "acc_stderr": 0.04760952285695235, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695235 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.6444444444444445, "acc_stderr": 0.04135176749720386, "acc_norm": 0.6444444444444445, "acc_norm_stderr": 0.04135176749720386 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.6907894736842105, "acc_stderr": 0.037610708698674805, "acc_norm": 0.6907894736842105, "acc_norm_stderr": 0.037610708698674805 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.62, "acc_stderr": 0.048783173121456316, "acc_norm": 0.62, "acc_norm_stderr": 0.048783173121456316 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.7245283018867924, "acc_stderr": 0.02749566368372406, "acc_norm": 0.7245283018867924, "acc_norm_stderr": 0.02749566368372406 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.7777777777777778, "acc_stderr": 0.03476590104304134, "acc_norm": 0.7777777777777778, "acc_norm_stderr": 0.03476590104304134 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.47, "acc_stderr": 0.050161355804659205, "acc_norm": 0.47, "acc_norm_stderr": 0.050161355804659205 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.55, "acc_stderr": 0.05, "acc_norm": 0.55, "acc_norm_stderr": 0.05 }, "harness|hendrycksTest-college_mathematics|5": 
{ "acc": 0.32, "acc_stderr": 0.046882617226215034, "acc_norm": 0.32, "acc_norm_stderr": 0.046882617226215034 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.6936416184971098, "acc_stderr": 0.035149425512674394, "acc_norm": 0.6936416184971098, "acc_norm_stderr": 0.035149425512674394 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.46078431372549017, "acc_stderr": 0.04959859966384181, "acc_norm": 0.46078431372549017, "acc_norm_stderr": 0.04959859966384181 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.77, "acc_stderr": 0.042295258468165065, "acc_norm": 0.77, "acc_norm_stderr": 0.042295258468165065 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.5829787234042553, "acc_stderr": 0.03223276266711712, "acc_norm": 0.5829787234042553, "acc_norm_stderr": 0.03223276266711712 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.5087719298245614, "acc_stderr": 0.04702880432049615, "acc_norm": 0.5087719298245614, "acc_norm_stderr": 0.04702880432049615 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5793103448275863, "acc_stderr": 0.0411391498118926, "acc_norm": 0.5793103448275863, "acc_norm_stderr": 0.0411391498118926 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.42857142857142855, "acc_stderr": 0.02548718714785938, "acc_norm": 0.42857142857142855, "acc_norm_stderr": 0.02548718714785938 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.46825396825396826, "acc_stderr": 0.04463112720677172, "acc_norm": 0.46825396825396826, "acc_norm_stderr": 0.04463112720677172 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.34, "acc_stderr": 0.04760952285695235, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695235 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.7838709677419354, "acc_stderr": 0.02341529343356853, "acc_norm": 0.7838709677419354, "acc_norm_stderr": 0.02341529343356853 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.5221674876847291, "acc_stderr": 0.03514528562175007, "acc_norm": 0.5221674876847291, "acc_norm_stderr": 0.03514528562175007 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.71, "acc_stderr": 0.045604802157206845, "acc_norm": 0.71, "acc_norm_stderr": 0.045604802157206845 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.7757575757575758, "acc_stderr": 0.03256866661681102, "acc_norm": 0.7757575757575758, "acc_norm_stderr": 0.03256866661681102 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.7929292929292929, "acc_stderr": 0.028869778460267042, "acc_norm": 0.7929292929292929, "acc_norm_stderr": 0.028869778460267042 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.8963730569948186, "acc_stderr": 0.02199531196364424, "acc_norm": 0.8963730569948186, "acc_norm_stderr": 0.02199531196364424 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.6666666666666666, "acc_stderr": 0.023901157979402538, "acc_norm": 0.6666666666666666, "acc_norm_stderr": 0.023901157979402538 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.32592592592592595, "acc_stderr": 0.028578348365473082, "acc_norm": 0.32592592592592595, "acc_norm_stderr": 0.028578348365473082 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.6680672268907563, "acc_stderr": 0.03058869701378364, "acc_norm": 0.6680672268907563, "acc_norm_stderr": 0.03058869701378364 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.3443708609271523, "acc_stderr": 0.038796870240733264, "acc_norm": 0.3443708609271523, 
"acc_norm_stderr": 0.038796870240733264 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8532110091743119, "acc_stderr": 0.01517314184512625, "acc_norm": 0.8532110091743119, "acc_norm_stderr": 0.01517314184512625 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.5416666666666666, "acc_stderr": 0.03398110890294636, "acc_norm": 0.5416666666666666, "acc_norm_stderr": 0.03398110890294636 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.8333333333333334, "acc_stderr": 0.02615686752393104, "acc_norm": 0.8333333333333334, "acc_norm_stderr": 0.02615686752393104 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.8059071729957806, "acc_stderr": 0.025744902532290916, "acc_norm": 0.8059071729957806, "acc_norm_stderr": 0.025744902532290916 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.6905829596412556, "acc_stderr": 0.031024411740572213, "acc_norm": 0.6905829596412556, "acc_norm_stderr": 0.031024411740572213 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.8244274809160306, "acc_stderr": 0.03336820338476074, "acc_norm": 0.8244274809160306, "acc_norm_stderr": 0.03336820338476074 }, "harness|hendrycksTest-international_law|5": { "acc": 0.8016528925619835, "acc_stderr": 0.03640118271990947, "acc_norm": 0.8016528925619835, "acc_norm_stderr": 0.03640118271990947 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.7962962962962963, "acc_stderr": 0.03893542518824847, "acc_norm": 0.7962962962962963, "acc_norm_stderr": 0.03893542518824847 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.7852760736196319, "acc_stderr": 0.03226219377286775, "acc_norm": 0.7852760736196319, "acc_norm_stderr": 0.03226219377286775 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.42857142857142855, "acc_stderr": 0.04697113923010212, "acc_norm": 0.42857142857142855, "acc_norm_stderr": 0.04697113923010212 }, "harness|hendrycksTest-management|5": { "acc": 0.7572815533980582, "acc_stderr": 0.04245022486384495, "acc_norm": 0.7572815533980582, "acc_norm_stderr": 0.04245022486384495 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8760683760683761, "acc_stderr": 0.021586494001281376, "acc_norm": 0.8760683760683761, "acc_norm_stderr": 0.021586494001281376 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.71, "acc_stderr": 0.045604802157206845, "acc_norm": 0.71, "acc_norm_stderr": 0.045604802157206845 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.8352490421455939, "acc_stderr": 0.013265346261323797, "acc_norm": 0.8352490421455939, "acc_norm_stderr": 0.013265346261323797 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.7398843930635838, "acc_stderr": 0.023618678310069356, "acc_norm": 0.7398843930635838, "acc_norm_stderr": 0.023618678310069356 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.4759776536312849, "acc_stderr": 0.016703190189300186, "acc_norm": 0.4759776536312849, "acc_norm_stderr": 0.016703190189300186 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.7254901960784313, "acc_stderr": 0.025553169991826528, "acc_norm": 0.7254901960784313, "acc_norm_stderr": 0.025553169991826528 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.7170418006430869, "acc_stderr": 0.02558306248998481, "acc_norm": 0.7170418006430869, "acc_norm_stderr": 0.02558306248998481 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.7469135802469136, "acc_stderr": 0.024191808600712995, "acc_norm": 0.7469135802469136, "acc_norm_stderr": 0.024191808600712995 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 
0.48936170212765956, "acc_stderr": 0.029820747191422473, "acc_norm": 0.48936170212765956, "acc_norm_stderr": 0.029820747191422473 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.46936114732724904, "acc_stderr": 0.012746237711716634, "acc_norm": 0.46936114732724904, "acc_norm_stderr": 0.012746237711716634 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.6838235294117647, "acc_stderr": 0.02824568739146293, "acc_norm": 0.6838235294117647, "acc_norm_stderr": 0.02824568739146293 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.673202614379085, "acc_stderr": 0.018975427920507208, "acc_norm": 0.673202614379085, "acc_norm_stderr": 0.018975427920507208 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6909090909090909, "acc_stderr": 0.044262946482000985, "acc_norm": 0.6909090909090909, "acc_norm_stderr": 0.044262946482000985 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.726530612244898, "acc_stderr": 0.028535560337128438, "acc_norm": 0.726530612244898, "acc_norm_stderr": 0.028535560337128438 }, "harness|hendrycksTest-sociology|5": { "acc": 0.835820895522388, "acc_stderr": 0.026193923544454115, "acc_norm": 0.835820895522388, "acc_norm_stderr": 0.026193923544454115 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.86, "acc_stderr": 0.0348735088019777, "acc_norm": 0.86, "acc_norm_stderr": 0.0348735088019777 }, "harness|hendrycksTest-virology|5": { "acc": 0.5481927710843374, "acc_stderr": 0.03874371556587953, "acc_norm": 0.5481927710843374, "acc_norm_stderr": 0.03874371556587953 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8245614035087719, "acc_stderr": 0.029170885500727665, "acc_norm": 0.8245614035087719, "acc_norm_stderr": 0.029170885500727665 }, "harness|truthfulqa:mc|0": { "mc1": 0.554467564259486, "mc1_stderr": 0.017399335280140343, "mc2": 0.695178465897982, "mc2_stderr": 0.015007650690745592 }, "harness|winogrande|5": { "acc": 0.824782951854775, "acc_stderr": 0.010684179227706177 }, "harness|gsm8k|5": { "acc": 0.7073540561031084, "acc_stderr": 0.01253233436824289 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). 
--> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
open-llm-leaderboard/details_GreenNode__GreenNodeLM-v3olet-7B
[ "region:us" ]
2023-12-16T20:51:51+00:00
{"pretty_name": "Evaluation run of GreenNode/GreenNodeLM-v3olet-7B", "dataset_summary": "Dataset automatically created during the evaluation run of model [GreenNode/GreenNodeLM-v3olet-7B](https://huggingface.co/GreenNode/GreenNodeLM-v3olet-7B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_GreenNode__GreenNodeLM-v3olet-7B\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-12-16T20:49:02.259410](https://huggingface.co/datasets/open-llm-leaderboard/details_GreenNode__GreenNodeLM-v3olet-7B/blob/main/results_2023-12-16T20-49-02.259410.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.657982142300908,\n \"acc_stderr\": 0.03197829574357859,\n \"acc_norm\": 0.6577587599136167,\n \"acc_norm_stderr\": 0.032639601635152275,\n \"mc1\": 0.554467564259486,\n \"mc1_stderr\": 0.017399335280140343,\n \"mc2\": 0.695178465897982,\n \"mc2_stderr\": 0.015007650690745592\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.6919795221843004,\n \"acc_stderr\": 0.013491429517292038,\n \"acc_norm\": 0.7226962457337884,\n \"acc_norm_stderr\": 0.013082095839059376\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.7102170882294364,\n \"acc_stderr\": 0.004527343651130799,\n \"acc_norm\": 0.8824935271858195,\n \"acc_norm_stderr\": 0.0032136470410029463\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.34,\n \"acc_stderr\": 0.04760952285695235,\n \"acc_norm\": 0.34,\n \"acc_norm_stderr\": 0.04760952285695235\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6444444444444445,\n \"acc_stderr\": 0.04135176749720386,\n \"acc_norm\": 0.6444444444444445,\n \"acc_norm_stderr\": 0.04135176749720386\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.6907894736842105,\n \"acc_stderr\": 0.037610708698674805,\n \"acc_norm\": 0.6907894736842105,\n \"acc_norm_stderr\": 0.037610708698674805\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.62,\n \"acc_stderr\": 0.048783173121456316,\n \"acc_norm\": 0.62,\n \"acc_norm_stderr\": 0.048783173121456316\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.7245283018867924,\n \"acc_stderr\": 0.02749566368372406,\n \"acc_norm\": 0.7245283018867924,\n \"acc_norm_stderr\": 0.02749566368372406\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7777777777777778,\n \"acc_stderr\": 0.03476590104304134,\n \"acc_norm\": 0.7777777777777778,\n \"acc_norm_stderr\": 0.03476590104304134\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.47,\n \"acc_stderr\": 
0.050161355804659205,\n \"acc_norm\": 0.47,\n \"acc_norm_stderr\": 0.050161355804659205\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.55,\n \"acc_stderr\": 0.05,\n \"acc_norm\": 0.55,\n \"acc_norm_stderr\": 0.05\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.32,\n \"acc_stderr\": 0.046882617226215034,\n \"acc_norm\": 0.32,\n \"acc_norm_stderr\": 0.046882617226215034\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6936416184971098,\n \"acc_stderr\": 0.035149425512674394,\n \"acc_norm\": 0.6936416184971098,\n \"acc_norm_stderr\": 0.035149425512674394\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.46078431372549017,\n \"acc_stderr\": 0.04959859966384181,\n \"acc_norm\": 0.46078431372549017,\n \"acc_norm_stderr\": 0.04959859966384181\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.77,\n \"acc_stderr\": 0.042295258468165065,\n \"acc_norm\": 0.77,\n \"acc_norm_stderr\": 0.042295258468165065\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.5829787234042553,\n \"acc_stderr\": 0.03223276266711712,\n \"acc_norm\": 0.5829787234042553,\n \"acc_norm_stderr\": 0.03223276266711712\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.5087719298245614,\n \"acc_stderr\": 0.04702880432049615,\n \"acc_norm\": 0.5087719298245614,\n \"acc_norm_stderr\": 0.04702880432049615\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5793103448275863,\n \"acc_stderr\": 0.0411391498118926,\n \"acc_norm\": 0.5793103448275863,\n \"acc_norm_stderr\": 0.0411391498118926\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.42857142857142855,\n \"acc_stderr\": 0.02548718714785938,\n \"acc_norm\": 0.42857142857142855,\n \"acc_norm_stderr\": 0.02548718714785938\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.46825396825396826,\n \"acc_stderr\": 0.04463112720677172,\n \"acc_norm\": 0.46825396825396826,\n \"acc_norm_stderr\": 0.04463112720677172\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.34,\n \"acc_stderr\": 0.04760952285695235,\n \"acc_norm\": 0.34,\n \"acc_norm_stderr\": 0.04760952285695235\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7838709677419354,\n \"acc_stderr\": 0.02341529343356853,\n \"acc_norm\": 0.7838709677419354,\n \"acc_norm_stderr\": 0.02341529343356853\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.5221674876847291,\n \"acc_stderr\": 0.03514528562175007,\n \"acc_norm\": 0.5221674876847291,\n \"acc_norm_stderr\": 0.03514528562175007\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.71,\n \"acc_stderr\": 0.045604802157206845,\n \"acc_norm\": 0.71,\n \"acc_norm_stderr\": 0.045604802157206845\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7757575757575758,\n \"acc_stderr\": 0.03256866661681102,\n \"acc_norm\": 0.7757575757575758,\n \"acc_norm_stderr\": 0.03256866661681102\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.7929292929292929,\n \"acc_stderr\": 0.028869778460267042,\n \"acc_norm\": 0.7929292929292929,\n \"acc_norm_stderr\": 0.028869778460267042\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.8963730569948186,\n \"acc_stderr\": 0.02199531196364424,\n \"acc_norm\": 0.8963730569948186,\n \"acc_norm_stderr\": 0.02199531196364424\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 
0.6666666666666666,\n \"acc_stderr\": 0.023901157979402538,\n \"acc_norm\": 0.6666666666666666,\n \"acc_norm_stderr\": 0.023901157979402538\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.32592592592592595,\n \"acc_stderr\": 0.028578348365473082,\n \"acc_norm\": 0.32592592592592595,\n \"acc_norm_stderr\": 0.028578348365473082\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.6680672268907563,\n \"acc_stderr\": 0.03058869701378364,\n \"acc_norm\": 0.6680672268907563,\n \"acc_norm_stderr\": 0.03058869701378364\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.3443708609271523,\n \"acc_stderr\": 0.038796870240733264,\n \"acc_norm\": 0.3443708609271523,\n \"acc_norm_stderr\": 0.038796870240733264\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8532110091743119,\n \"acc_stderr\": 0.01517314184512625,\n \"acc_norm\": 0.8532110091743119,\n \"acc_norm_stderr\": 0.01517314184512625\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.5416666666666666,\n \"acc_stderr\": 0.03398110890294636,\n \"acc_norm\": 0.5416666666666666,\n \"acc_norm_stderr\": 0.03398110890294636\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.8333333333333334,\n \"acc_stderr\": 0.02615686752393104,\n \"acc_norm\": 0.8333333333333334,\n \"acc_norm_stderr\": 0.02615686752393104\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.8059071729957806,\n \"acc_stderr\": 0.025744902532290916,\n \"acc_norm\": 0.8059071729957806,\n \"acc_norm_stderr\": 0.025744902532290916\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6905829596412556,\n \"acc_stderr\": 0.031024411740572213,\n \"acc_norm\": 0.6905829596412556,\n \"acc_norm_stderr\": 0.031024411740572213\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.8244274809160306,\n \"acc_stderr\": 0.03336820338476074,\n \"acc_norm\": 0.8244274809160306,\n \"acc_norm_stderr\": 0.03336820338476074\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.8016528925619835,\n \"acc_stderr\": 0.03640118271990947,\n \"acc_norm\": 0.8016528925619835,\n \"acc_norm_stderr\": 0.03640118271990947\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7962962962962963,\n \"acc_stderr\": 0.03893542518824847,\n \"acc_norm\": 0.7962962962962963,\n \"acc_norm_stderr\": 0.03893542518824847\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7852760736196319,\n \"acc_stderr\": 0.03226219377286775,\n \"acc_norm\": 0.7852760736196319,\n \"acc_norm_stderr\": 0.03226219377286775\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.42857142857142855,\n \"acc_stderr\": 0.04697113923010212,\n \"acc_norm\": 0.42857142857142855,\n \"acc_norm_stderr\": 0.04697113923010212\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7572815533980582,\n \"acc_stderr\": 0.04245022486384495,\n \"acc_norm\": 0.7572815533980582,\n \"acc_norm_stderr\": 0.04245022486384495\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8760683760683761,\n \"acc_stderr\": 0.021586494001281376,\n \"acc_norm\": 0.8760683760683761,\n \"acc_norm_stderr\": 0.021586494001281376\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.71,\n \"acc_stderr\": 0.045604802157206845,\n \"acc_norm\": 0.71,\n \"acc_norm_stderr\": 0.045604802157206845\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.8352490421455939,\n \"acc_stderr\": 0.013265346261323797,\n 
\"acc_norm\": 0.8352490421455939,\n \"acc_norm_stderr\": 0.013265346261323797\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.7398843930635838,\n \"acc_stderr\": 0.023618678310069356,\n \"acc_norm\": 0.7398843930635838,\n \"acc_norm_stderr\": 0.023618678310069356\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.4759776536312849,\n \"acc_stderr\": 0.016703190189300186,\n \"acc_norm\": 0.4759776536312849,\n \"acc_norm_stderr\": 0.016703190189300186\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.7254901960784313,\n \"acc_stderr\": 0.025553169991826528,\n \"acc_norm\": 0.7254901960784313,\n \"acc_norm_stderr\": 0.025553169991826528\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.7170418006430869,\n \"acc_stderr\": 0.02558306248998481,\n \"acc_norm\": 0.7170418006430869,\n \"acc_norm_stderr\": 0.02558306248998481\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.7469135802469136,\n \"acc_stderr\": 0.024191808600712995,\n \"acc_norm\": 0.7469135802469136,\n \"acc_norm_stderr\": 0.024191808600712995\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.48936170212765956,\n \"acc_stderr\": 0.029820747191422473,\n \"acc_norm\": 0.48936170212765956,\n \"acc_norm_stderr\": 0.029820747191422473\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.46936114732724904,\n \"acc_stderr\": 0.012746237711716634,\n \"acc_norm\": 0.46936114732724904,\n \"acc_norm_stderr\": 0.012746237711716634\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.6838235294117647,\n \"acc_stderr\": 0.02824568739146293,\n \"acc_norm\": 0.6838235294117647,\n \"acc_norm_stderr\": 0.02824568739146293\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.673202614379085,\n \"acc_stderr\": 0.018975427920507208,\n \"acc_norm\": 0.673202614379085,\n \"acc_norm_stderr\": 0.018975427920507208\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6909090909090909,\n \"acc_stderr\": 0.044262946482000985,\n \"acc_norm\": 0.6909090909090909,\n \"acc_norm_stderr\": 0.044262946482000985\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.726530612244898,\n \"acc_stderr\": 0.028535560337128438,\n \"acc_norm\": 0.726530612244898,\n \"acc_norm_stderr\": 0.028535560337128438\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.835820895522388,\n \"acc_stderr\": 0.026193923544454115,\n \"acc_norm\": 0.835820895522388,\n \"acc_norm_stderr\": 0.026193923544454115\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.86,\n \"acc_stderr\": 0.0348735088019777,\n \"acc_norm\": 0.86,\n \"acc_norm_stderr\": 0.0348735088019777\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5481927710843374,\n \"acc_stderr\": 0.03874371556587953,\n \"acc_norm\": 0.5481927710843374,\n \"acc_norm_stderr\": 0.03874371556587953\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8245614035087719,\n \"acc_stderr\": 0.029170885500727665,\n \"acc_norm\": 0.8245614035087719,\n \"acc_norm_stderr\": 0.029170885500727665\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.554467564259486,\n \"mc1_stderr\": 0.017399335280140343,\n \"mc2\": 0.695178465897982,\n \"mc2_stderr\": 0.015007650690745592\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.824782951854775,\n \"acc_stderr\": 0.010684179227706177\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.7073540561031084,\n \"acc_stderr\": 0.01253233436824289\n }\n}\n```", "repo_url": 
"https://huggingface.co/GreenNode/GreenNodeLM-v3olet-7B", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_12_16T20_49_02.259410", "path": ["**/details_harness|arc:challenge|25_2023-12-16T20-49-02.259410.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-12-16T20-49-02.259410.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_12_16T20_49_02.259410", "path": ["**/details_harness|gsm8k|5_2023-12-16T20-49-02.259410.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-12-16T20-49-02.259410.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_12_16T20_49_02.259410", "path": ["**/details_harness|hellaswag|10_2023-12-16T20-49-02.259410.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-12-16T20-49-02.259410.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_12_16T20_49_02.259410", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T20-49-02.259410.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-12-16T20-49-02.259410.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-12-16T20-49-02.259410.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T20-49-02.259410.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T20-49-02.259410.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-12-16T20-49-02.259410.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T20-49-02.259410.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T20-49-02.259410.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T20-49-02.259410.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T20-49-02.259410.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-12-16T20-49-02.259410.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-12-16T20-49-02.259410.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T20-49-02.259410.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-12-16T20-49-02.259410.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T20-49-02.259410.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T20-49-02.259410.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T20-49-02.259410.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-12-16T20-49-02.259410.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T20-49-02.259410.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T20-49-02.259410.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T20-49-02.259410.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T20-49-02.259410.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T20-49-02.259410.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T20-49-02.259410.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T20-49-02.259410.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T20-49-02.259410.parquet", 
"**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T20-49-02.259410.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T20-49-02.259410.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T20-49-02.259410.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T20-49-02.259410.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T20-49-02.259410.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T20-49-02.259410.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-12-16T20-49-02.259410.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T20-49-02.259410.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-12-16T20-49-02.259410.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T20-49-02.259410.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T20-49-02.259410.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T20-49-02.259410.parquet", "**/details_harness|hendrycksTest-management|5_2023-12-16T20-49-02.259410.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-12-16T20-49-02.259410.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T20-49-02.259410.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T20-49-02.259410.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T20-49-02.259410.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T20-49-02.259410.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-12-16T20-49-02.259410.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-12-16T20-49-02.259410.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-12-16T20-49-02.259410.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T20-49-02.259410.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-12-16T20-49-02.259410.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T20-49-02.259410.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T20-49-02.259410.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-12-16T20-49-02.259410.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-12-16T20-49-02.259410.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-12-16T20-49-02.259410.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T20-49-02.259410.parquet", "**/details_harness|hendrycksTest-virology|5_2023-12-16T20-49-02.259410.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-12-16T20-49-02.259410.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T20-49-02.259410.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-12-16T20-49-02.259410.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-12-16T20-49-02.259410.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T20-49-02.259410.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T20-49-02.259410.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-12-16T20-49-02.259410.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T20-49-02.259410.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T20-49-02.259410.parquet", 
"**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T20-49-02.259410.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T20-49-02.259410.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-12-16T20-49-02.259410.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-12-16T20-49-02.259410.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T20-49-02.259410.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-12-16T20-49-02.259410.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T20-49-02.259410.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T20-49-02.259410.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T20-49-02.259410.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-12-16T20-49-02.259410.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T20-49-02.259410.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T20-49-02.259410.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T20-49-02.259410.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T20-49-02.259410.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T20-49-02.259410.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T20-49-02.259410.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T20-49-02.259410.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T20-49-02.259410.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T20-49-02.259410.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T20-49-02.259410.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T20-49-02.259410.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T20-49-02.259410.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T20-49-02.259410.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T20-49-02.259410.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-12-16T20-49-02.259410.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T20-49-02.259410.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-12-16T20-49-02.259410.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T20-49-02.259410.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T20-49-02.259410.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T20-49-02.259410.parquet", "**/details_harness|hendrycksTest-management|5_2023-12-16T20-49-02.259410.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-12-16T20-49-02.259410.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T20-49-02.259410.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T20-49-02.259410.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T20-49-02.259410.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T20-49-02.259410.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-12-16T20-49-02.259410.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-12-16T20-49-02.259410.parquet", 
"**/details_harness|hendrycksTest-prehistory|5_2023-12-16T20-49-02.259410.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T20-49-02.259410.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-12-16T20-49-02.259410.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T20-49-02.259410.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T20-49-02.259410.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-12-16T20-49-02.259410.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-12-16T20-49-02.259410.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-12-16T20-49-02.259410.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T20-49-02.259410.parquet", "**/details_harness|hendrycksTest-virology|5_2023-12-16T20-49-02.259410.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-12-16T20-49-02.259410.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_12_16T20_49_02.259410", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T20-49-02.259410.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T20-49-02.259410.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_12_16T20_49_02.259410", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-12-16T20-49-02.259410.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-12-16T20-49-02.259410.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_12_16T20_49_02.259410", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-12-16T20-49-02.259410.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-12-16T20-49-02.259410.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_12_16T20_49_02.259410", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T20-49-02.259410.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T20-49-02.259410.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_12_16T20_49_02.259410", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T20-49-02.259410.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T20-49-02.259410.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_12_16T20_49_02.259410", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-12-16T20-49-02.259410.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-12-16T20-49-02.259410.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_12_16T20_49_02.259410", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T20-49-02.259410.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T20-49-02.259410.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_12_16T20_49_02.259410", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T20-49-02.259410.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T20-49-02.259410.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_12_16T20_49_02.259410", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T20-49-02.259410.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T20-49-02.259410.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_12_16T20_49_02.259410", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T20-49-02.259410.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T20-49-02.259410.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_12_16T20_49_02.259410", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-12-16T20-49-02.259410.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-12-16T20-49-02.259410.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_12_16T20_49_02.259410", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-12-16T20-49-02.259410.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-12-16T20-49-02.259410.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_12_16T20_49_02.259410", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T20-49-02.259410.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T20-49-02.259410.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_12_16T20_49_02.259410", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-12-16T20-49-02.259410.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-12-16T20-49-02.259410.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_12_16T20_49_02.259410", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T20-49-02.259410.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T20-49-02.259410.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_12_16T20_49_02.259410", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T20-49-02.259410.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T20-49-02.259410.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_12_16T20_49_02.259410", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T20-49-02.259410.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T20-49-02.259410.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_12_16T20_49_02.259410", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-12-16T20-49-02.259410.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-12-16T20-49-02.259410.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_12_16T20_49_02.259410", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T20-49-02.259410.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T20-49-02.259410.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_12_16T20_49_02.259410", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T20-49-02.259410.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T20-49-02.259410.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_12_16T20_49_02.259410", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T20-49-02.259410.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T20-49-02.259410.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_12_16T20_49_02.259410", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T20-49-02.259410.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T20-49-02.259410.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_12_16T20_49_02.259410", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T20-49-02.259410.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T20-49-02.259410.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_12_16T20_49_02.259410", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T20-49-02.259410.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T20-49-02.259410.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_12_16T20_49_02.259410", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T20-49-02.259410.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T20-49-02.259410.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_12_16T20_49_02.259410", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T20-49-02.259410.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T20-49-02.259410.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_12_16T20_49_02.259410", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T20-49-02.259410.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T20-49-02.259410.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_12_16T20_49_02.259410", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T20-49-02.259410.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T20-49-02.259410.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_12_16T20_49_02.259410", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T20-49-02.259410.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T20-49-02.259410.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_12_16T20_49_02.259410", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T20-49-02.259410.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T20-49-02.259410.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_12_16T20_49_02.259410", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T20-49-02.259410.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T20-49-02.259410.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_12_16T20_49_02.259410", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T20-49-02.259410.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T20-49-02.259410.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_12_16T20_49_02.259410", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-12-16T20-49-02.259410.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-12-16T20-49-02.259410.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_12_16T20_49_02.259410", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T20-49-02.259410.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T20-49-02.259410.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_12_16T20_49_02.259410", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-12-16T20-49-02.259410.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-12-16T20-49-02.259410.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_12_16T20_49_02.259410", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T20-49-02.259410.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T20-49-02.259410.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_12_16T20_49_02.259410", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T20-49-02.259410.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T20-49-02.259410.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_12_16T20_49_02.259410", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T20-49-02.259410.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T20-49-02.259410.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_12_16T20_49_02.259410", "path": ["**/details_harness|hendrycksTest-management|5_2023-12-16T20-49-02.259410.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-12-16T20-49-02.259410.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_12_16T20_49_02.259410", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-12-16T20-49-02.259410.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-12-16T20-49-02.259410.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_12_16T20_49_02.259410", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T20-49-02.259410.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T20-49-02.259410.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_12_16T20_49_02.259410", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T20-49-02.259410.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T20-49-02.259410.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_12_16T20_49_02.259410", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T20-49-02.259410.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T20-49-02.259410.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_12_16T20_49_02.259410", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T20-49-02.259410.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T20-49-02.259410.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_12_16T20_49_02.259410", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-12-16T20-49-02.259410.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-12-16T20-49-02.259410.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_12_16T20_49_02.259410", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-12-16T20-49-02.259410.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-12-16T20-49-02.259410.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_12_16T20_49_02.259410", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-12-16T20-49-02.259410.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-12-16T20-49-02.259410.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_12_16T20_49_02.259410", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T20-49-02.259410.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T20-49-02.259410.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_12_16T20_49_02.259410", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-12-16T20-49-02.259410.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2023-12-16T20-49-02.259410.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_12_16T20_49_02.259410", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T20-49-02.259410.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T20-49-02.259410.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_12_16T20_49_02.259410", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T20-49-02.259410.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T20-49-02.259410.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_12_16T20_49_02.259410", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-12-16T20-49-02.259410.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-12-16T20-49-02.259410.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_12_16T20_49_02.259410", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-12-16T20-49-02.259410.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-12-16T20-49-02.259410.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_12_16T20_49_02.259410", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-12-16T20-49-02.259410.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-12-16T20-49-02.259410.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_12_16T20_49_02.259410", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T20-49-02.259410.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T20-49-02.259410.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_12_16T20_49_02.259410", "path": ["**/details_harness|hendrycksTest-virology|5_2023-12-16T20-49-02.259410.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-12-16T20-49-02.259410.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_12_16T20_49_02.259410", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-12-16T20-49-02.259410.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-12-16T20-49-02.259410.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_12_16T20_49_02.259410", "path": ["**/details_harness|truthfulqa:mc|0_2023-12-16T20-49-02.259410.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-12-16T20-49-02.259410.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_12_16T20_49_02.259410", "path": ["**/details_harness|winogrande|5_2023-12-16T20-49-02.259410.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-12-16T20-49-02.259410.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_12_16T20_49_02.259410", "path": ["results_2023-12-16T20-49-02.259410.parquet"]}, {"split": "latest", "path": 
["results_2023-12-16T20-49-02.259410.parquet"]}]}]}
2023-12-16T20:52:36+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of GreenNode/GreenNodeLM-v3olet-7B Dataset automatically created during the evaluation run of model GreenNode/GreenNodeLM-v3olet-7B on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following (see the loading sketch after this card): ## Latest results These are the latest results from run 2023-12-16T20:49:02.259410 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
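The card above says "To load the details from a run, you can for instance do the following:", but the accompanying code block was dropped in this processed copy. A minimal sketch of the intended call, mirroring the snippet used for the other evaluation runs in this document (the repository id is again an assumption based on the same naming pattern):

```python
from datasets import load_dataset

# Assumed repo id for the GreenNode/GreenNodeLM-v3olet-7B evaluation details.
data = load_dataset(
    "open-llm-leaderboard/details_GreenNode__GreenNodeLM-v3olet-7B",
    "harness_winogrande_5",
    split="train",
)
```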
[ "# Dataset Card for Evaluation run of GreenNode/GreenNodeLM-v3olet-7B\n\n\n\nDataset automatically created during the evaluation run of model GreenNode/GreenNodeLM-v3olet-7B on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-12-16T20:49:02.259410(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of GreenNode/GreenNodeLM-v3olet-7B\n\n\n\nDataset automatically created during the evaluation run of model GreenNode/GreenNodeLM-v3olet-7B on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-12-16T20:49:02.259410(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 191, 67, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of GreenNode/GreenNodeLM-v3olet-7B\n\n\n\nDataset automatically created during the evaluation run of model GreenNode/GreenNodeLM-v3olet-7B on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-12-16T20:49:02.259410(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]## Dataset Card Authors [optional]" ]
023df381c5bc0e5999487cc204af98d36ec05566
# Dataset Card for Evaluation run of perlthoughts/Starling-LM-alpha-8x7B-MoE <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [perlthoughts/Starling-LM-alpha-8x7B-MoE](https://huggingface.co/perlthoughts/Starling-LM-alpha-8x7B-MoE) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_perlthoughts__Starling-LM-alpha-8x7B-MoE", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-12-16T21:04:03.066898](https://huggingface.co/datasets/open-llm-leaderboard/details_perlthoughts__Starling-LM-alpha-8x7B-MoE/blob/main/results_2023-12-16T21-04-03.066898.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.6485827629012115, "acc_stderr": 0.03189932261733175, "acc_norm": 0.6500544215634738, "acc_norm_stderr": 0.032542506459412, "mc1": 0.3047735618115055, "mc1_stderr": 0.01611412415688245, "mc2": 0.4639249177352108, "mc2_stderr": 0.015154559507326514 }, "harness|arc:challenge|25": { "acc": 0.5998293515358362, "acc_stderr": 0.014317197787809172, "acc_norm": 0.636518771331058, "acc_norm_stderr": 0.014056207319068283 }, "harness|hellaswag|10": { "acc": 0.6655048795060745, "acc_stderr": 0.004708494114574018, "acc_norm": 0.8490340569607648, "acc_norm_stderr": 0.0035728399695219874 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.37, "acc_stderr": 0.048523658709391, "acc_norm": 0.37, "acc_norm_stderr": 0.048523658709391 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.6148148148148148, "acc_stderr": 0.04203921040156279, "acc_norm": 0.6148148148148148, "acc_norm_stderr": 0.04203921040156279 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.6907894736842105, "acc_stderr": 0.037610708698674805, "acc_norm": 0.6907894736842105, "acc_norm_stderr": 0.037610708698674805 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.63, "acc_stderr": 0.04852365870939099, "acc_norm": 0.63, "acc_norm_stderr": 0.04852365870939099 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.6867924528301886, "acc_stderr": 0.028544793319055326, "acc_norm": 0.6867924528301886, "acc_norm_stderr": 0.028544793319055326 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.7638888888888888, "acc_stderr": 0.03551446610810826, "acc_norm": 0.7638888888888888, "acc_norm_stderr": 0.03551446610810826 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.5, "acc_stderr": 0.050251890762960605, "acc_norm": 0.5, "acc_norm_stderr": 0.050251890762960605 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.51, "acc_stderr": 0.05024183937956912, "acc_norm": 0.51, "acc_norm_stderr": 
0.05024183937956912 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.32, "acc_stderr": 0.04688261722621505, "acc_norm": 0.32, "acc_norm_stderr": 0.04688261722621505 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.6589595375722543, "acc_stderr": 0.03614665424180826, "acc_norm": 0.6589595375722543, "acc_norm_stderr": 0.03614665424180826 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.4411764705882353, "acc_stderr": 0.049406356306056595, "acc_norm": 0.4411764705882353, "acc_norm_stderr": 0.049406356306056595 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.74, "acc_stderr": 0.04408440022768079, "acc_norm": 0.74, "acc_norm_stderr": 0.04408440022768079 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.5531914893617021, "acc_stderr": 0.032500536843658404, "acc_norm": 0.5531914893617021, "acc_norm_stderr": 0.032500536843658404 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.49122807017543857, "acc_stderr": 0.04702880432049615, "acc_norm": 0.49122807017543857, "acc_norm_stderr": 0.04702880432049615 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5724137931034483, "acc_stderr": 0.04122737111370332, "acc_norm": 0.5724137931034483, "acc_norm_stderr": 0.04122737111370332 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.40476190476190477, "acc_stderr": 0.025279850397404904, "acc_norm": 0.40476190476190477, "acc_norm_stderr": 0.025279850397404904 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.5238095238095238, "acc_stderr": 0.04467062628403273, "acc_norm": 0.5238095238095238, "acc_norm_stderr": 0.04467062628403273 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.22, "acc_stderr": 0.04163331998932269, "acc_norm": 0.22, "acc_norm_stderr": 0.04163331998932269 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.7903225806451613, "acc_stderr": 0.023157879349083525, "acc_norm": 0.7903225806451613, "acc_norm_stderr": 0.023157879349083525 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.46798029556650245, "acc_stderr": 0.035107665979592154, "acc_norm": 0.46798029556650245, "acc_norm_stderr": 0.035107665979592154 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.7, "acc_stderr": 0.046056618647183814, "acc_norm": 0.7, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.7636363636363637, "acc_stderr": 0.03317505930009181, "acc_norm": 0.7636363636363637, "acc_norm_stderr": 0.03317505930009181 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.797979797979798, "acc_stderr": 0.028606204289229865, "acc_norm": 0.797979797979798, "acc_norm_stderr": 0.028606204289229865 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.9067357512953368, "acc_stderr": 0.02098685459328974, "acc_norm": 0.9067357512953368, "acc_norm_stderr": 0.02098685459328974 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.676923076923077, "acc_stderr": 0.02371088850197057, "acc_norm": 0.676923076923077, "acc_norm_stderr": 0.02371088850197057 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.32222222222222224, "acc_stderr": 0.028493465091028593, "acc_norm": 0.32222222222222224, "acc_norm_stderr": 0.028493465091028593 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.680672268907563, "acc_stderr": 0.0302839955258844, "acc_norm": 0.680672268907563, "acc_norm_stderr": 0.0302839955258844 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.37748344370860926, 
"acc_stderr": 0.03958027231121569, "acc_norm": 0.37748344370860926, "acc_norm_stderr": 0.03958027231121569 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8477064220183487, "acc_stderr": 0.015405084393157074, "acc_norm": 0.8477064220183487, "acc_norm_stderr": 0.015405084393157074 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.5, "acc_stderr": 0.034099716973523674, "acc_norm": 0.5, "acc_norm_stderr": 0.034099716973523674 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.8235294117647058, "acc_stderr": 0.026756401538078966, "acc_norm": 0.8235294117647058, "acc_norm_stderr": 0.026756401538078966 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.8270042194092827, "acc_stderr": 0.024621562866768434, "acc_norm": 0.8270042194092827, "acc_norm_stderr": 0.024621562866768434 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.7130044843049327, "acc_stderr": 0.030360379710291947, "acc_norm": 0.7130044843049327, "acc_norm_stderr": 0.030360379710291947 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.7862595419847328, "acc_stderr": 0.0359546161177469, "acc_norm": 0.7862595419847328, "acc_norm_stderr": 0.0359546161177469 }, "harness|hendrycksTest-international_law|5": { "acc": 0.8181818181818182, "acc_stderr": 0.03520893951097653, "acc_norm": 0.8181818181818182, "acc_norm_stderr": 0.03520893951097653 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.7407407407407407, "acc_stderr": 0.042365112580946336, "acc_norm": 0.7407407407407407, "acc_norm_stderr": 0.042365112580946336 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.7730061349693251, "acc_stderr": 0.03291099578615769, "acc_norm": 0.7730061349693251, "acc_norm_stderr": 0.03291099578615769 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.41964285714285715, "acc_stderr": 0.04684099321077106, "acc_norm": 0.41964285714285715, "acc_norm_stderr": 0.04684099321077106 }, "harness|hendrycksTest-management|5": { "acc": 0.8446601941747572, "acc_stderr": 0.03586594738573973, "acc_norm": 0.8446601941747572, "acc_norm_stderr": 0.03586594738573973 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8760683760683761, "acc_stderr": 0.02158649400128138, "acc_norm": 0.8760683760683761, "acc_norm_stderr": 0.02158649400128138 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.74, "acc_stderr": 0.044084400227680794, "acc_norm": 0.74, "acc_norm_stderr": 0.044084400227680794 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.8275862068965517, "acc_stderr": 0.013507943909371803, "acc_norm": 0.8275862068965517, "acc_norm_stderr": 0.013507943909371803 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.7341040462427746, "acc_stderr": 0.02378620325550829, "acc_norm": 0.7341040462427746, "acc_norm_stderr": 0.02378620325550829 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.47039106145251397, "acc_stderr": 0.016693154927383557, "acc_norm": 0.47039106145251397, "acc_norm_stderr": 0.016693154927383557 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.7352941176470589, "acc_stderr": 0.025261691219729484, "acc_norm": 0.7352941176470589, "acc_norm_stderr": 0.025261691219729484 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.6977491961414791, "acc_stderr": 0.02608270069539966, "acc_norm": 0.6977491961414791, "acc_norm_stderr": 0.02608270069539966 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.7345679012345679, "acc_stderr": 0.024569223600460845, "acc_norm": 0.7345679012345679, "acc_norm_stderr": 0.024569223600460845 }, 
"harness|hendrycksTest-professional_accounting|5": { "acc": 0.46808510638297873, "acc_stderr": 0.029766675075873866, "acc_norm": 0.46808510638297873, "acc_norm_stderr": 0.029766675075873866 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.46479791395045633, "acc_stderr": 0.012738547371303957, "acc_norm": 0.46479791395045633, "acc_norm_stderr": 0.012738547371303957 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.6911764705882353, "acc_stderr": 0.028064998167040094, "acc_norm": 0.6911764705882353, "acc_norm_stderr": 0.028064998167040094 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.6633986928104575, "acc_stderr": 0.019117213911495144, "acc_norm": 0.6633986928104575, "acc_norm_stderr": 0.019117213911495144 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6545454545454545, "acc_stderr": 0.04554619617541054, "acc_norm": 0.6545454545454545, "acc_norm_stderr": 0.04554619617541054 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.726530612244898, "acc_stderr": 0.028535560337128448, "acc_norm": 0.726530612244898, "acc_norm_stderr": 0.028535560337128448 }, "harness|hendrycksTest-sociology|5": { "acc": 0.845771144278607, "acc_stderr": 0.025538433368578334, "acc_norm": 0.845771144278607, "acc_norm_stderr": 0.025538433368578334 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.89, "acc_stderr": 0.03144660377352203, "acc_norm": 0.89, "acc_norm_stderr": 0.03144660377352203 }, "harness|hendrycksTest-virology|5": { "acc": 0.5301204819277109, "acc_stderr": 0.03885425420866767, "acc_norm": 0.5301204819277109, "acc_norm_stderr": 0.03885425420866767 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.847953216374269, "acc_stderr": 0.027539122889061456, "acc_norm": 0.847953216374269, "acc_norm_stderr": 0.027539122889061456 }, "harness|truthfulqa:mc|0": { "mc1": 0.3047735618115055, "mc1_stderr": 0.01611412415688245, "mc2": 0.4639249177352108, "mc2_stderr": 0.015154559507326514 }, "harness|winogrande|5": { "acc": 0.8058405682715075, "acc_stderr": 0.01111698339239267 }, "harness|gsm8k|5": { "acc": 0.624715693707354, "acc_stderr": 0.013337170545742927 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. 
news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
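Besides the per-task configurations, the card notes that a "results" configuration stores the aggregated metrics of the run. A minimal sketch of loading it for this dataset, using the repository id given in the load snippet earlier in the card; the "results" config and its "latest" split are assumed to follow the same pattern shown in the configs metadata for the other run in this document:

```python
from datasets import load_dataset

# Aggregated metrics for the run, as stored in the "results" configuration.
results = load_dataset(
    "open-llm-leaderboard/details_perlthoughts__Starling-LM-alpha-8x7B-MoE",
    "results",
    split="latest",
)
print(results)
```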
open-llm-leaderboard/details_perlthoughts__Starling-LM-alpha-8x7B-MoE
[ "region:us" ]
2023-12-16T21:06:57+00:00
{"pretty_name": "Evaluation run of perlthoughts/Starling-LM-alpha-8x7B-MoE", "dataset_summary": "Dataset automatically created during the evaluation run of model [perlthoughts/Starling-LM-alpha-8x7B-MoE](https://huggingface.co/perlthoughts/Starling-LM-alpha-8x7B-MoE) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_perlthoughts__Starling-LM-alpha-8x7B-MoE\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-12-16T21:04:03.066898](https://huggingface.co/datasets/open-llm-leaderboard/details_perlthoughts__Starling-LM-alpha-8x7B-MoE/blob/main/results_2023-12-16T21-04-03.066898.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6485827629012115,\n \"acc_stderr\": 0.03189932261733175,\n \"acc_norm\": 0.6500544215634738,\n \"acc_norm_stderr\": 0.032542506459412,\n \"mc1\": 0.3047735618115055,\n \"mc1_stderr\": 0.01611412415688245,\n \"mc2\": 0.4639249177352108,\n \"mc2_stderr\": 0.015154559507326514\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.5998293515358362,\n \"acc_stderr\": 0.014317197787809172,\n \"acc_norm\": 0.636518771331058,\n \"acc_norm_stderr\": 0.014056207319068283\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6655048795060745,\n \"acc_stderr\": 0.004708494114574018,\n \"acc_norm\": 0.8490340569607648,\n \"acc_norm_stderr\": 0.0035728399695219874\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.37,\n \"acc_stderr\": 0.048523658709391,\n \"acc_norm\": 0.37,\n \"acc_norm_stderr\": 0.048523658709391\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6148148148148148,\n \"acc_stderr\": 0.04203921040156279,\n \"acc_norm\": 0.6148148148148148,\n \"acc_norm_stderr\": 0.04203921040156279\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.6907894736842105,\n \"acc_stderr\": 0.037610708698674805,\n \"acc_norm\": 0.6907894736842105,\n \"acc_norm_stderr\": 0.037610708698674805\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.63,\n \"acc_stderr\": 0.04852365870939099,\n \"acc_norm\": 0.63,\n \"acc_norm_stderr\": 0.04852365870939099\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.6867924528301886,\n \"acc_stderr\": 0.028544793319055326,\n \"acc_norm\": 0.6867924528301886,\n \"acc_norm_stderr\": 0.028544793319055326\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7638888888888888,\n \"acc_stderr\": 0.03551446610810826,\n \"acc_norm\": 0.7638888888888888,\n \"acc_norm_stderr\": 0.03551446610810826\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.5,\n 
\"acc_stderr\": 0.050251890762960605,\n \"acc_norm\": 0.5,\n \"acc_norm_stderr\": 0.050251890762960605\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.51,\n \"acc_stderr\": 0.05024183937956912,\n \"acc_norm\": 0.51,\n \"acc_norm_stderr\": 0.05024183937956912\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.32,\n \"acc_stderr\": 0.04688261722621505,\n \"acc_norm\": 0.32,\n \"acc_norm_stderr\": 0.04688261722621505\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6589595375722543,\n \"acc_stderr\": 0.03614665424180826,\n \"acc_norm\": 0.6589595375722543,\n \"acc_norm_stderr\": 0.03614665424180826\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.4411764705882353,\n \"acc_stderr\": 0.049406356306056595,\n \"acc_norm\": 0.4411764705882353,\n \"acc_norm_stderr\": 0.049406356306056595\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.74,\n \"acc_stderr\": 0.04408440022768079,\n \"acc_norm\": 0.74,\n \"acc_norm_stderr\": 0.04408440022768079\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.5531914893617021,\n \"acc_stderr\": 0.032500536843658404,\n \"acc_norm\": 0.5531914893617021,\n \"acc_norm_stderr\": 0.032500536843658404\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.49122807017543857,\n \"acc_stderr\": 0.04702880432049615,\n \"acc_norm\": 0.49122807017543857,\n \"acc_norm_stderr\": 0.04702880432049615\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5724137931034483,\n \"acc_stderr\": 0.04122737111370332,\n \"acc_norm\": 0.5724137931034483,\n \"acc_norm_stderr\": 0.04122737111370332\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.40476190476190477,\n \"acc_stderr\": 0.025279850397404904,\n \"acc_norm\": 0.40476190476190477,\n \"acc_norm_stderr\": 0.025279850397404904\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.5238095238095238,\n \"acc_stderr\": 0.04467062628403273,\n \"acc_norm\": 0.5238095238095238,\n \"acc_norm_stderr\": 0.04467062628403273\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.22,\n \"acc_stderr\": 0.04163331998932269,\n \"acc_norm\": 0.22,\n \"acc_norm_stderr\": 0.04163331998932269\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7903225806451613,\n \"acc_stderr\": 0.023157879349083525,\n \"acc_norm\": 0.7903225806451613,\n \"acc_norm_stderr\": 0.023157879349083525\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.46798029556650245,\n \"acc_stderr\": 0.035107665979592154,\n \"acc_norm\": 0.46798029556650245,\n \"acc_norm_stderr\": 0.035107665979592154\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.7,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.7,\n \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7636363636363637,\n \"acc_stderr\": 0.03317505930009181,\n \"acc_norm\": 0.7636363636363637,\n \"acc_norm_stderr\": 0.03317505930009181\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.797979797979798,\n \"acc_stderr\": 0.028606204289229865,\n \"acc_norm\": 0.797979797979798,\n \"acc_norm_stderr\": 0.028606204289229865\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.9067357512953368,\n \"acc_stderr\": 0.02098685459328974,\n \"acc_norm\": 0.9067357512953368,\n \"acc_norm_stderr\": 0.02098685459328974\n },\n 
\"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.676923076923077,\n \"acc_stderr\": 0.02371088850197057,\n \"acc_norm\": 0.676923076923077,\n \"acc_norm_stderr\": 0.02371088850197057\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.32222222222222224,\n \"acc_stderr\": 0.028493465091028593,\n \"acc_norm\": 0.32222222222222224,\n \"acc_norm_stderr\": 0.028493465091028593\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.680672268907563,\n \"acc_stderr\": 0.0302839955258844,\n \"acc_norm\": 0.680672268907563,\n \"acc_norm_stderr\": 0.0302839955258844\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.37748344370860926,\n \"acc_stderr\": 0.03958027231121569,\n \"acc_norm\": 0.37748344370860926,\n \"acc_norm_stderr\": 0.03958027231121569\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8477064220183487,\n \"acc_stderr\": 0.015405084393157074,\n \"acc_norm\": 0.8477064220183487,\n \"acc_norm_stderr\": 0.015405084393157074\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.5,\n \"acc_stderr\": 0.034099716973523674,\n \"acc_norm\": 0.5,\n \"acc_norm_stderr\": 0.034099716973523674\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.8235294117647058,\n \"acc_stderr\": 0.026756401538078966,\n \"acc_norm\": 0.8235294117647058,\n \"acc_norm_stderr\": 0.026756401538078966\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.8270042194092827,\n \"acc_stderr\": 0.024621562866768434,\n \"acc_norm\": 0.8270042194092827,\n \"acc_norm_stderr\": 0.024621562866768434\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.7130044843049327,\n \"acc_stderr\": 0.030360379710291947,\n \"acc_norm\": 0.7130044843049327,\n \"acc_norm_stderr\": 0.030360379710291947\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.7862595419847328,\n \"acc_stderr\": 0.0359546161177469,\n \"acc_norm\": 0.7862595419847328,\n \"acc_norm_stderr\": 0.0359546161177469\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.8181818181818182,\n \"acc_stderr\": 0.03520893951097653,\n \"acc_norm\": 0.8181818181818182,\n \"acc_norm_stderr\": 0.03520893951097653\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7407407407407407,\n \"acc_stderr\": 0.042365112580946336,\n \"acc_norm\": 0.7407407407407407,\n \"acc_norm_stderr\": 0.042365112580946336\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7730061349693251,\n \"acc_stderr\": 0.03291099578615769,\n \"acc_norm\": 0.7730061349693251,\n \"acc_norm_stderr\": 0.03291099578615769\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.41964285714285715,\n \"acc_stderr\": 0.04684099321077106,\n \"acc_norm\": 0.41964285714285715,\n \"acc_norm_stderr\": 0.04684099321077106\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.8446601941747572,\n \"acc_stderr\": 0.03586594738573973,\n \"acc_norm\": 0.8446601941747572,\n \"acc_norm_stderr\": 0.03586594738573973\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8760683760683761,\n \"acc_stderr\": 0.02158649400128138,\n \"acc_norm\": 0.8760683760683761,\n \"acc_norm_stderr\": 0.02158649400128138\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.74,\n \"acc_stderr\": 0.044084400227680794,\n \"acc_norm\": 0.74,\n \"acc_norm_stderr\": 0.044084400227680794\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.8275862068965517,\n \"acc_stderr\": 
0.013507943909371803,\n \"acc_norm\": 0.8275862068965517,\n \"acc_norm_stderr\": 0.013507943909371803\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.7341040462427746,\n \"acc_stderr\": 0.02378620325550829,\n \"acc_norm\": 0.7341040462427746,\n \"acc_norm_stderr\": 0.02378620325550829\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.47039106145251397,\n \"acc_stderr\": 0.016693154927383557,\n \"acc_norm\": 0.47039106145251397,\n \"acc_norm_stderr\": 0.016693154927383557\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.7352941176470589,\n \"acc_stderr\": 0.025261691219729484,\n \"acc_norm\": 0.7352941176470589,\n \"acc_norm_stderr\": 0.025261691219729484\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.6977491961414791,\n \"acc_stderr\": 0.02608270069539966,\n \"acc_norm\": 0.6977491961414791,\n \"acc_norm_stderr\": 0.02608270069539966\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.7345679012345679,\n \"acc_stderr\": 0.024569223600460845,\n \"acc_norm\": 0.7345679012345679,\n \"acc_norm_stderr\": 0.024569223600460845\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.46808510638297873,\n \"acc_stderr\": 0.029766675075873866,\n \"acc_norm\": 0.46808510638297873,\n \"acc_norm_stderr\": 0.029766675075873866\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.46479791395045633,\n \"acc_stderr\": 0.012738547371303957,\n \"acc_norm\": 0.46479791395045633,\n \"acc_norm_stderr\": 0.012738547371303957\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.6911764705882353,\n \"acc_stderr\": 0.028064998167040094,\n \"acc_norm\": 0.6911764705882353,\n \"acc_norm_stderr\": 0.028064998167040094\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6633986928104575,\n \"acc_stderr\": 0.019117213911495144,\n \"acc_norm\": 0.6633986928104575,\n \"acc_norm_stderr\": 0.019117213911495144\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6545454545454545,\n \"acc_stderr\": 0.04554619617541054,\n \"acc_norm\": 0.6545454545454545,\n \"acc_norm_stderr\": 0.04554619617541054\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.726530612244898,\n \"acc_stderr\": 0.028535560337128448,\n \"acc_norm\": 0.726530612244898,\n \"acc_norm_stderr\": 0.028535560337128448\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.845771144278607,\n \"acc_stderr\": 0.025538433368578334,\n \"acc_norm\": 0.845771144278607,\n \"acc_norm_stderr\": 0.025538433368578334\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.89,\n \"acc_stderr\": 0.03144660377352203,\n \"acc_norm\": 0.89,\n \"acc_norm_stderr\": 0.03144660377352203\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5301204819277109,\n \"acc_stderr\": 0.03885425420866767,\n \"acc_norm\": 0.5301204819277109,\n \"acc_norm_stderr\": 0.03885425420866767\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.847953216374269,\n \"acc_stderr\": 0.027539122889061456,\n \"acc_norm\": 0.847953216374269,\n \"acc_norm_stderr\": 0.027539122889061456\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.3047735618115055,\n \"mc1_stderr\": 0.01611412415688245,\n \"mc2\": 0.4639249177352108,\n \"mc2_stderr\": 0.015154559507326514\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.8058405682715075,\n \"acc_stderr\": 0.01111698339239267\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.624715693707354,\n \"acc_stderr\": 0.013337170545742927\n }\n}\n```", "repo_url": 
"https://huggingface.co/perlthoughts/Starling-LM-alpha-8x7B-MoE", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_12_16T21_04_03.066898", "path": ["**/details_harness|arc:challenge|25_2023-12-16T21-04-03.066898.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-12-16T21-04-03.066898.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_12_16T21_04_03.066898", "path": ["**/details_harness|gsm8k|5_2023-12-16T21-04-03.066898.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-12-16T21-04-03.066898.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_12_16T21_04_03.066898", "path": ["**/details_harness|hellaswag|10_2023-12-16T21-04-03.066898.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-12-16T21-04-03.066898.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_12_16T21_04_03.066898", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T21-04-03.066898.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-12-16T21-04-03.066898.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-12-16T21-04-03.066898.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T21-04-03.066898.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T21-04-03.066898.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-12-16T21-04-03.066898.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T21-04-03.066898.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T21-04-03.066898.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T21-04-03.066898.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T21-04-03.066898.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-12-16T21-04-03.066898.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-12-16T21-04-03.066898.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T21-04-03.066898.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-12-16T21-04-03.066898.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T21-04-03.066898.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T21-04-03.066898.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T21-04-03.066898.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-12-16T21-04-03.066898.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T21-04-03.066898.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T21-04-03.066898.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T21-04-03.066898.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T21-04-03.066898.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T21-04-03.066898.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T21-04-03.066898.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T21-04-03.066898.parquet", 
"**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T21-04-03.066898.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T21-04-03.066898.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T21-04-03.066898.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T21-04-03.066898.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T21-04-03.066898.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T21-04-03.066898.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T21-04-03.066898.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-12-16T21-04-03.066898.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T21-04-03.066898.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-12-16T21-04-03.066898.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T21-04-03.066898.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T21-04-03.066898.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T21-04-03.066898.parquet", "**/details_harness|hendrycksTest-management|5_2023-12-16T21-04-03.066898.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-12-16T21-04-03.066898.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T21-04-03.066898.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T21-04-03.066898.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T21-04-03.066898.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T21-04-03.066898.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-12-16T21-04-03.066898.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-12-16T21-04-03.066898.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-12-16T21-04-03.066898.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T21-04-03.066898.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-12-16T21-04-03.066898.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T21-04-03.066898.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T21-04-03.066898.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-12-16T21-04-03.066898.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-12-16T21-04-03.066898.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-12-16T21-04-03.066898.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T21-04-03.066898.parquet", "**/details_harness|hendrycksTest-virology|5_2023-12-16T21-04-03.066898.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-12-16T21-04-03.066898.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T21-04-03.066898.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-12-16T21-04-03.066898.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-12-16T21-04-03.066898.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T21-04-03.066898.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T21-04-03.066898.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-12-16T21-04-03.066898.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T21-04-03.066898.parquet", 
"**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T21-04-03.066898.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T21-04-03.066898.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T21-04-03.066898.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-12-16T21-04-03.066898.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-12-16T21-04-03.066898.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T21-04-03.066898.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-12-16T21-04-03.066898.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T21-04-03.066898.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T21-04-03.066898.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T21-04-03.066898.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-12-16T21-04-03.066898.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T21-04-03.066898.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T21-04-03.066898.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T21-04-03.066898.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T21-04-03.066898.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T21-04-03.066898.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T21-04-03.066898.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T21-04-03.066898.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T21-04-03.066898.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T21-04-03.066898.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T21-04-03.066898.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T21-04-03.066898.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T21-04-03.066898.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T21-04-03.066898.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T21-04-03.066898.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-12-16T21-04-03.066898.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T21-04-03.066898.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-12-16T21-04-03.066898.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T21-04-03.066898.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T21-04-03.066898.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T21-04-03.066898.parquet", "**/details_harness|hendrycksTest-management|5_2023-12-16T21-04-03.066898.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-12-16T21-04-03.066898.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T21-04-03.066898.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T21-04-03.066898.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T21-04-03.066898.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T21-04-03.066898.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-12-16T21-04-03.066898.parquet", 
"**/details_harness|hendrycksTest-philosophy|5_2023-12-16T21-04-03.066898.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-12-16T21-04-03.066898.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T21-04-03.066898.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-12-16T21-04-03.066898.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T21-04-03.066898.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T21-04-03.066898.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-12-16T21-04-03.066898.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-12-16T21-04-03.066898.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-12-16T21-04-03.066898.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T21-04-03.066898.parquet", "**/details_harness|hendrycksTest-virology|5_2023-12-16T21-04-03.066898.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-12-16T21-04-03.066898.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_12_16T21_04_03.066898", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T21-04-03.066898.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T21-04-03.066898.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_12_16T21_04_03.066898", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-12-16T21-04-03.066898.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-12-16T21-04-03.066898.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_12_16T21_04_03.066898", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-12-16T21-04-03.066898.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-12-16T21-04-03.066898.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_12_16T21_04_03.066898", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T21-04-03.066898.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T21-04-03.066898.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_12_16T21_04_03.066898", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T21-04-03.066898.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T21-04-03.066898.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_12_16T21_04_03.066898", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-12-16T21-04-03.066898.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-12-16T21-04-03.066898.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_12_16T21_04_03.066898", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T21-04-03.066898.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T21-04-03.066898.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_12_16T21_04_03.066898", "path": 
["**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T21-04-03.066898.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T21-04-03.066898.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_12_16T21_04_03.066898", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T21-04-03.066898.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T21-04-03.066898.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_12_16T21_04_03.066898", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T21-04-03.066898.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T21-04-03.066898.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_12_16T21_04_03.066898", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-12-16T21-04-03.066898.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-12-16T21-04-03.066898.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_12_16T21_04_03.066898", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-12-16T21-04-03.066898.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-12-16T21-04-03.066898.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_12_16T21_04_03.066898", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T21-04-03.066898.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T21-04-03.066898.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_12_16T21_04_03.066898", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-12-16T21-04-03.066898.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-12-16T21-04-03.066898.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_12_16T21_04_03.066898", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T21-04-03.066898.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T21-04-03.066898.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_12_16T21_04_03.066898", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T21-04-03.066898.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T21-04-03.066898.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_12_16T21_04_03.066898", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T21-04-03.066898.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T21-04-03.066898.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_12_16T21_04_03.066898", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-12-16T21-04-03.066898.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-global_facts|5_2023-12-16T21-04-03.066898.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_12_16T21_04_03.066898", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T21-04-03.066898.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T21-04-03.066898.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_12_16T21_04_03.066898", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T21-04-03.066898.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T21-04-03.066898.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_12_16T21_04_03.066898", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T21-04-03.066898.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T21-04-03.066898.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_12_16T21_04_03.066898", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T21-04-03.066898.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T21-04-03.066898.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_12_16T21_04_03.066898", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T21-04-03.066898.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T21-04-03.066898.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_12_16T21_04_03.066898", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T21-04-03.066898.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T21-04-03.066898.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_12_16T21_04_03.066898", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T21-04-03.066898.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T21-04-03.066898.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_12_16T21_04_03.066898", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T21-04-03.066898.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T21-04-03.066898.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_12_16T21_04_03.066898", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T21-04-03.066898.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T21-04-03.066898.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_12_16T21_04_03.066898", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T21-04-03.066898.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T21-04-03.066898.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_12_16T21_04_03.066898", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T21-04-03.066898.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T21-04-03.066898.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_12_16T21_04_03.066898", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T21-04-03.066898.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T21-04-03.066898.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_12_16T21_04_03.066898", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T21-04-03.066898.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T21-04-03.066898.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_12_16T21_04_03.066898", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T21-04-03.066898.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T21-04-03.066898.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_12_16T21_04_03.066898", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-12-16T21-04-03.066898.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-12-16T21-04-03.066898.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_12_16T21_04_03.066898", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T21-04-03.066898.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T21-04-03.066898.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_12_16T21_04_03.066898", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-12-16T21-04-03.066898.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-12-16T21-04-03.066898.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_12_16T21_04_03.066898", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T21-04-03.066898.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T21-04-03.066898.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_12_16T21_04_03.066898", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T21-04-03.066898.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T21-04-03.066898.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_12_16T21_04_03.066898", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T21-04-03.066898.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T21-04-03.066898.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_12_16T21_04_03.066898", "path": ["**/details_harness|hendrycksTest-management|5_2023-12-16T21-04-03.066898.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-12-16T21-04-03.066898.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_12_16T21_04_03.066898", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-12-16T21-04-03.066898.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-12-16T21-04-03.066898.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_12_16T21_04_03.066898", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T21-04-03.066898.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T21-04-03.066898.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_12_16T21_04_03.066898", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T21-04-03.066898.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T21-04-03.066898.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_12_16T21_04_03.066898", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T21-04-03.066898.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T21-04-03.066898.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_12_16T21_04_03.066898", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T21-04-03.066898.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T21-04-03.066898.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_12_16T21_04_03.066898", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-12-16T21-04-03.066898.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-12-16T21-04-03.066898.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_12_16T21_04_03.066898", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-12-16T21-04-03.066898.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-12-16T21-04-03.066898.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_12_16T21_04_03.066898", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-12-16T21-04-03.066898.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-12-16T21-04-03.066898.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_12_16T21_04_03.066898", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T21-04-03.066898.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T21-04-03.066898.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_12_16T21_04_03.066898", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2023-12-16T21-04-03.066898.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-12-16T21-04-03.066898.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_12_16T21_04_03.066898", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T21-04-03.066898.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T21-04-03.066898.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_12_16T21_04_03.066898", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T21-04-03.066898.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T21-04-03.066898.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_12_16T21_04_03.066898", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-12-16T21-04-03.066898.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-12-16T21-04-03.066898.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_12_16T21_04_03.066898", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-12-16T21-04-03.066898.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-12-16T21-04-03.066898.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_12_16T21_04_03.066898", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-12-16T21-04-03.066898.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-12-16T21-04-03.066898.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_12_16T21_04_03.066898", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T21-04-03.066898.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T21-04-03.066898.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_12_16T21_04_03.066898", "path": ["**/details_harness|hendrycksTest-virology|5_2023-12-16T21-04-03.066898.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-12-16T21-04-03.066898.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_12_16T21_04_03.066898", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-12-16T21-04-03.066898.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-12-16T21-04-03.066898.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_12_16T21_04_03.066898", "path": ["**/details_harness|truthfulqa:mc|0_2023-12-16T21-04-03.066898.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-12-16T21-04-03.066898.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_12_16T21_04_03.066898", "path": ["**/details_harness|winogrande|5_2023-12-16T21-04-03.066898.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-12-16T21-04-03.066898.parquet"]}]}, {"config_name": "results", "data_files": [{"split": 
"2023_12_16T21_04_03.066898", "path": ["results_2023-12-16T21-04-03.066898.parquet"]}, {"split": "latest", "path": ["results_2023-12-16T21-04-03.066898.parquet"]}]}]}
2023-12-16T21:07:41+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of perlthoughts/Starling-LM-alpha-8x7B-MoE Dataset automatically created during the evaluation run of model perlthoughts/Starling-LM-alpha-8x7B-MoE on the Open LLM Leaderboard. The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-12-16T21:04:03.066898(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
[ "# Dataset Card for Evaluation run of perlthoughts/Starling-LM-alpha-8x7B-MoE\n\n\n\nDataset automatically created during the evaluation run of model perlthoughts/Starling-LM-alpha-8x7B-MoE on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-12-16T21:04:03.066898(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of perlthoughts/Starling-LM-alpha-8x7B-MoE\n\n\n\nDataset automatically created during the evaluation run of model perlthoughts/Starling-LM-alpha-8x7B-MoE on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-12-16T21:04:03.066898(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 199, 67, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of perlthoughts/Starling-LM-alpha-8x7B-MoE\n\n\n\nDataset automatically created during the evaluation run of model perlthoughts/Starling-LM-alpha-8x7B-MoE on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-12-16T21:04:03.066898(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]" ]
0df01ad353e48b84410abddbb932bd02520a76ba
# Dataset Card for Evaluation run of perlthoughts/Chupacabra-8x7B-MoE

<!-- Provide a quick summary of the dataset. -->

Dataset automatically created during the evaluation run of model [perlthoughts/Chupacabra-8x7B-MoE](https://huggingface.co/perlthoughts/Chupacabra-8x7B-MoE) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).

The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).

To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_perlthoughts__Chupacabra-8x7B-MoE",
    "harness_winogrande_5",
    split="train")
```

## Latest results

These are the [latest results from run 2023-12-16T21:20:16.522598](https://huggingface.co/datasets/open-llm-leaderboard/details_perlthoughts__Chupacabra-8x7B-MoE/blob/main/results_2023-12-16T21-20-16.522598.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval):

```python
{ "all": { "acc": 0.6415463145761969, "acc_stderr": 0.03233222952484684, "acc_norm": 0.6432118874966731, "acc_norm_stderr": 0.03298409149127022, "mc1": 0.47613219094247244, "mc1_stderr": 0.017483547156961578, "mc2": 0.6350369384683723, "mc2_stderr": 0.01508168993616602 }, "harness|arc:challenge|25": { "acc": 0.6561433447098977, "acc_stderr": 0.01388064457015621, "acc_norm": 0.6877133105802048, "acc_norm_stderr": 0.013542598541688067 }, "harness|hellaswag|10": { "acc": 0.6757618004381597, "acc_stderr": 0.004671328673217797, "acc_norm": 0.8610834495120494, "acc_norm_stderr": 0.003451525868724678 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.29, "acc_stderr": 0.04560480215720684, "acc_norm": 0.29, "acc_norm_stderr": 0.04560480215720684 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.6518518518518519, "acc_stderr": 0.041153246103369526, "acc_norm": 0.6518518518518519, "acc_norm_stderr": 0.041153246103369526 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.6907894736842105, "acc_stderr": 0.037610708698674805, "acc_norm": 0.6907894736842105, "acc_norm_stderr": 0.037610708698674805 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.62, "acc_stderr": 0.048783173121456316, "acc_norm": 0.62, "acc_norm_stderr": 0.048783173121456316 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.6830188679245283, "acc_stderr": 0.02863723563980089, "acc_norm": 0.6830188679245283, "acc_norm_stderr": 0.02863723563980089 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.7361111111111112, "acc_stderr": 0.03685651095897532, "acc_norm": 0.7361111111111112, "acc_norm_stderr": 0.03685651095897532 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.45, "acc_stderr": 0.05, "acc_norm": 0.45, "acc_norm_stderr": 0.05 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.56, "acc_stderr": 0.04988876515698589, "acc_norm": 0.56, "acc_norm_stderr": 0.04988876515698589 },
"harness|hendrycksTest-college_mathematics|5": { "acc": 0.32, "acc_stderr": 0.046882617226215034, "acc_norm": 0.32, "acc_norm_stderr": 0.046882617226215034 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.653179190751445, "acc_stderr": 0.036291466701596636, "acc_norm": 0.653179190751445, "acc_norm_stderr": 0.036291466701596636 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.39215686274509803, "acc_stderr": 0.04858083574266345, "acc_norm": 0.39215686274509803, "acc_norm_stderr": 0.04858083574266345 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.74, "acc_stderr": 0.04408440022768078, "acc_norm": 0.74, "acc_norm_stderr": 0.04408440022768078 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.6, "acc_stderr": 0.03202563076101735, "acc_norm": 0.6, "acc_norm_stderr": 0.03202563076101735 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.45614035087719296, "acc_stderr": 0.046854730419077895, "acc_norm": 0.45614035087719296, "acc_norm_stderr": 0.046854730419077895 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5586206896551724, "acc_stderr": 0.04137931034482757, "acc_norm": 0.5586206896551724, "acc_norm_stderr": 0.04137931034482757 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.41534391534391535, "acc_stderr": 0.025379524910778408, "acc_norm": 0.41534391534391535, "acc_norm_stderr": 0.025379524910778408 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.42063492063492064, "acc_stderr": 0.04415438226743744, "acc_norm": 0.42063492063492064, "acc_norm_stderr": 0.04415438226743744 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.39, "acc_stderr": 0.04902071300001974, "acc_norm": 0.39, "acc_norm_stderr": 0.04902071300001974 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.7612903225806451, "acc_stderr": 0.02425107126220884, "acc_norm": 0.7612903225806451, "acc_norm_stderr": 0.02425107126220884 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.49261083743842365, "acc_stderr": 0.035176035403610084, "acc_norm": 0.49261083743842365, "acc_norm_stderr": 0.035176035403610084 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.74, "acc_stderr": 0.044084400227680794, "acc_norm": 0.74, "acc_norm_stderr": 0.044084400227680794 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.7757575757575758, "acc_stderr": 0.032568666616811015, "acc_norm": 0.7757575757575758, "acc_norm_stderr": 0.032568666616811015 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.7878787878787878, "acc_stderr": 0.029126522834586815, "acc_norm": 0.7878787878787878, "acc_norm_stderr": 0.029126522834586815 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.8652849740932642, "acc_stderr": 0.02463978909770944, "acc_norm": 0.8652849740932642, "acc_norm_stderr": 0.02463978909770944 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.6794871794871795, "acc_stderr": 0.023661296393964273, "acc_norm": 0.6794871794871795, "acc_norm_stderr": 0.023661296393964273 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.32592592592592595, "acc_stderr": 0.028578348365473082, "acc_norm": 0.32592592592592595, "acc_norm_stderr": 0.028578348365473082 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.7016806722689075, "acc_stderr": 0.02971914287634286, "acc_norm": 0.7016806722689075, "acc_norm_stderr": 0.02971914287634286 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.31788079470198677, "acc_stderr": 0.038020397601079024, 
"acc_norm": 0.31788079470198677, "acc_norm_stderr": 0.038020397601079024 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8477064220183487, "acc_stderr": 0.015405084393157074, "acc_norm": 0.8477064220183487, "acc_norm_stderr": 0.015405084393157074 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.5509259259259259, "acc_stderr": 0.03392238405321617, "acc_norm": 0.5509259259259259, "acc_norm_stderr": 0.03392238405321617 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.8137254901960784, "acc_stderr": 0.027325470966716312, "acc_norm": 0.8137254901960784, "acc_norm_stderr": 0.027325470966716312 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.7848101265822784, "acc_stderr": 0.026750826994676177, "acc_norm": 0.7848101265822784, "acc_norm_stderr": 0.026750826994676177 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.6905829596412556, "acc_stderr": 0.03102441174057221, "acc_norm": 0.6905829596412556, "acc_norm_stderr": 0.03102441174057221 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.7633587786259542, "acc_stderr": 0.03727673575596913, "acc_norm": 0.7633587786259542, "acc_norm_stderr": 0.03727673575596913 }, "harness|hendrycksTest-international_law|5": { "acc": 0.7768595041322314, "acc_stderr": 0.03800754475228733, "acc_norm": 0.7768595041322314, "acc_norm_stderr": 0.03800754475228733 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.7777777777777778, "acc_stderr": 0.040191074725573483, "acc_norm": 0.7777777777777778, "acc_norm_stderr": 0.040191074725573483 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.754601226993865, "acc_stderr": 0.03380939813943354, "acc_norm": 0.754601226993865, "acc_norm_stderr": 0.03380939813943354 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.45535714285714285, "acc_stderr": 0.047268355537191, "acc_norm": 0.45535714285714285, "acc_norm_stderr": 0.047268355537191 }, "harness|hendrycksTest-management|5": { "acc": 0.7961165048543689, "acc_stderr": 0.0398913985953177, "acc_norm": 0.7961165048543689, "acc_norm_stderr": 0.0398913985953177 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8675213675213675, "acc_stderr": 0.022209309073165616, "acc_norm": 0.8675213675213675, "acc_norm_stderr": 0.022209309073165616 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.7, "acc_stderr": 0.046056618647183814, "acc_norm": 0.7, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.822477650063857, "acc_stderr": 0.01366423099583483, "acc_norm": 0.822477650063857, "acc_norm_stderr": 0.01366423099583483 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.7312138728323699, "acc_stderr": 0.023868003262500104, "acc_norm": 0.7312138728323699, "acc_norm_stderr": 0.023868003262500104 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.4033519553072626, "acc_stderr": 0.01640712303219525, "acc_norm": 0.4033519553072626, "acc_norm_stderr": 0.01640712303219525 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.696078431372549, "acc_stderr": 0.026336613469046623, "acc_norm": 0.696078431372549, "acc_norm_stderr": 0.026336613469046623 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.7138263665594855, "acc_stderr": 0.02567025924218893, "acc_norm": 0.7138263665594855, "acc_norm_stderr": 0.02567025924218893 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.7376543209876543, "acc_stderr": 0.02447722285613511, "acc_norm": 0.7376543209876543, "acc_norm_stderr": 0.02447722285613511 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 
0.4858156028368794, "acc_stderr": 0.02981549448368206, "acc_norm": 0.4858156028368794, "acc_norm_stderr": 0.02981549448368206 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.4589308996088657, "acc_stderr": 0.012727084826799802, "acc_norm": 0.4589308996088657, "acc_norm_stderr": 0.012727084826799802 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.6727941176470589, "acc_stderr": 0.028501452860396556, "acc_norm": 0.6727941176470589, "acc_norm_stderr": 0.028501452860396556 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.6584967320261438, "acc_stderr": 0.019184639328092487, "acc_norm": 0.6584967320261438, "acc_norm_stderr": 0.019184639328092487 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6454545454545455, "acc_stderr": 0.045820048415054174, "acc_norm": 0.6454545454545455, "acc_norm_stderr": 0.045820048415054174 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.7020408163265306, "acc_stderr": 0.029279567411065677, "acc_norm": 0.7020408163265306, "acc_norm_stderr": 0.029279567411065677 }, "harness|hendrycksTest-sociology|5": { "acc": 0.8407960199004975, "acc_stderr": 0.02587064676616914, "acc_norm": 0.8407960199004975, "acc_norm_stderr": 0.02587064676616914 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.84, "acc_stderr": 0.03684529491774709, "acc_norm": 0.84, "acc_norm_stderr": 0.03684529491774709 }, "harness|hendrycksTest-virology|5": { "acc": 0.5180722891566265, "acc_stderr": 0.03889951252827216, "acc_norm": 0.5180722891566265, "acc_norm_stderr": 0.03889951252827216 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8187134502923976, "acc_stderr": 0.02954774168764004, "acc_norm": 0.8187134502923976, "acc_norm_stderr": 0.02954774168764004 }, "harness|truthfulqa:mc|0": { "mc1": 0.47613219094247244, "mc1_stderr": 0.017483547156961578, "mc2": 0.6350369384683723, "mc2_stderr": 0.01508168993616602 }, "harness|winogrande|5": { "acc": 0.8050513022888713, "acc_stderr": 0.011134099415938278 }, "harness|gsm8k|5": { "acc": 0.5966641394996209, "acc_stderr": 0.013512654781814706 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). 
--> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
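As a complement to the loading snippet near the top of this card, the aggregated scores described under the "results" configuration can be pulled the same way. A minimal sketch, assuming this repository exposes the same "results" configuration and "latest" split as the other cards in this dump; the column layout of the results parquet is not documented here, so the inspection step only prints whatever is present:

```python
from datasets import load_dataset

# The card states that a "results" configuration stores the aggregated metrics
# of the run; its "latest" split points at the newest results parquet.
results = load_dataset(
    "open-llm-leaderboard/details_perlthoughts__Chupacabra-8x7B-MoE",
    "results",
    split="latest",
)

# The schema of the results parquet is not documented in the card, so simply
# inspect the columns and the first row rather than assuming field names.
print(results.column_names)
print(results[0])
```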
open-llm-leaderboard/details_perlthoughts__Chupacabra-8x7B-MoE
[ "region:us" ]
2023-12-16T21:23:10+00:00
{"pretty_name": "Evaluation run of perlthoughts/Chupacabra-8x7B-MoE", "dataset_summary": "Dataset automatically created during the evaluation run of model [perlthoughts/Chupacabra-8x7B-MoE](https://huggingface.co/perlthoughts/Chupacabra-8x7B-MoE) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_perlthoughts__Chupacabra-8x7B-MoE\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-12-16T21:20:16.522598](https://huggingface.co/datasets/open-llm-leaderboard/details_perlthoughts__Chupacabra-8x7B-MoE/blob/main/results_2023-12-16T21-20-16.522598.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6415463145761969,\n \"acc_stderr\": 0.03233222952484684,\n \"acc_norm\": 0.6432118874966731,\n \"acc_norm_stderr\": 0.03298409149127022,\n \"mc1\": 0.47613219094247244,\n \"mc1_stderr\": 0.017483547156961578,\n \"mc2\": 0.6350369384683723,\n \"mc2_stderr\": 0.01508168993616602\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.6561433447098977,\n \"acc_stderr\": 0.01388064457015621,\n \"acc_norm\": 0.6877133105802048,\n \"acc_norm_stderr\": 0.013542598541688067\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6757618004381597,\n \"acc_stderr\": 0.004671328673217797,\n \"acc_norm\": 0.8610834495120494,\n \"acc_norm_stderr\": 0.003451525868724678\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.29,\n \"acc_stderr\": 0.04560480215720684,\n \"acc_norm\": 0.29,\n \"acc_norm_stderr\": 0.04560480215720684\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6518518518518519,\n \"acc_stderr\": 0.041153246103369526,\n \"acc_norm\": 0.6518518518518519,\n \"acc_norm_stderr\": 0.041153246103369526\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.6907894736842105,\n \"acc_stderr\": 0.037610708698674805,\n \"acc_norm\": 0.6907894736842105,\n \"acc_norm_stderr\": 0.037610708698674805\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.62,\n \"acc_stderr\": 0.048783173121456316,\n \"acc_norm\": 0.62,\n \"acc_norm_stderr\": 0.048783173121456316\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.6830188679245283,\n \"acc_stderr\": 0.02863723563980089,\n \"acc_norm\": 0.6830188679245283,\n \"acc_norm_stderr\": 0.02863723563980089\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7361111111111112,\n \"acc_stderr\": 0.03685651095897532,\n \"acc_norm\": 0.7361111111111112,\n \"acc_norm_stderr\": 0.03685651095897532\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.45,\n \"acc_stderr\": 0.05,\n 
\"acc_norm\": 0.45,\n \"acc_norm_stderr\": 0.05\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.56,\n \"acc_stderr\": 0.04988876515698589,\n \"acc_norm\": 0.56,\n \"acc_norm_stderr\": 0.04988876515698589\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.32,\n \"acc_stderr\": 0.046882617226215034,\n \"acc_norm\": 0.32,\n \"acc_norm_stderr\": 0.046882617226215034\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.653179190751445,\n \"acc_stderr\": 0.036291466701596636,\n \"acc_norm\": 0.653179190751445,\n \"acc_norm_stderr\": 0.036291466701596636\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.39215686274509803,\n \"acc_stderr\": 0.04858083574266345,\n \"acc_norm\": 0.39215686274509803,\n \"acc_norm_stderr\": 0.04858083574266345\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.74,\n \"acc_stderr\": 0.04408440022768078,\n \"acc_norm\": 0.74,\n \"acc_norm_stderr\": 0.04408440022768078\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.6,\n \"acc_stderr\": 0.03202563076101735,\n \"acc_norm\": 0.6,\n \"acc_norm_stderr\": 0.03202563076101735\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.45614035087719296,\n \"acc_stderr\": 0.046854730419077895,\n \"acc_norm\": 0.45614035087719296,\n \"acc_norm_stderr\": 0.046854730419077895\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5586206896551724,\n \"acc_stderr\": 0.04137931034482757,\n \"acc_norm\": 0.5586206896551724,\n \"acc_norm_stderr\": 0.04137931034482757\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.41534391534391535,\n \"acc_stderr\": 0.025379524910778408,\n \"acc_norm\": 0.41534391534391535,\n \"acc_norm_stderr\": 0.025379524910778408\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.42063492063492064,\n \"acc_stderr\": 0.04415438226743744,\n \"acc_norm\": 0.42063492063492064,\n \"acc_norm_stderr\": 0.04415438226743744\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.39,\n \"acc_stderr\": 0.04902071300001974,\n \"acc_norm\": 0.39,\n \"acc_norm_stderr\": 0.04902071300001974\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7612903225806451,\n \"acc_stderr\": 0.02425107126220884,\n \"acc_norm\": 0.7612903225806451,\n \"acc_norm_stderr\": 0.02425107126220884\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.49261083743842365,\n \"acc_stderr\": 0.035176035403610084,\n \"acc_norm\": 0.49261083743842365,\n \"acc_norm_stderr\": 0.035176035403610084\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.74,\n \"acc_stderr\": 0.044084400227680794,\n \"acc_norm\": 0.74,\n \"acc_norm_stderr\": 0.044084400227680794\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7757575757575758,\n \"acc_stderr\": 0.032568666616811015,\n \"acc_norm\": 0.7757575757575758,\n \"acc_norm_stderr\": 0.032568666616811015\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.7878787878787878,\n \"acc_stderr\": 0.029126522834586815,\n \"acc_norm\": 0.7878787878787878,\n \"acc_norm_stderr\": 0.029126522834586815\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.8652849740932642,\n \"acc_stderr\": 0.02463978909770944,\n \"acc_norm\": 0.8652849740932642,\n \"acc_norm_stderr\": 0.02463978909770944\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.6794871794871795,\n \"acc_stderr\": 
0.023661296393964273,\n \"acc_norm\": 0.6794871794871795,\n \"acc_norm_stderr\": 0.023661296393964273\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.32592592592592595,\n \"acc_stderr\": 0.028578348365473082,\n \"acc_norm\": 0.32592592592592595,\n \"acc_norm_stderr\": 0.028578348365473082\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.7016806722689075,\n \"acc_stderr\": 0.02971914287634286,\n \"acc_norm\": 0.7016806722689075,\n \"acc_norm_stderr\": 0.02971914287634286\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.31788079470198677,\n \"acc_stderr\": 0.038020397601079024,\n \"acc_norm\": 0.31788079470198677,\n \"acc_norm_stderr\": 0.038020397601079024\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8477064220183487,\n \"acc_stderr\": 0.015405084393157074,\n \"acc_norm\": 0.8477064220183487,\n \"acc_norm_stderr\": 0.015405084393157074\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.5509259259259259,\n \"acc_stderr\": 0.03392238405321617,\n \"acc_norm\": 0.5509259259259259,\n \"acc_norm_stderr\": 0.03392238405321617\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.8137254901960784,\n \"acc_stderr\": 0.027325470966716312,\n \"acc_norm\": 0.8137254901960784,\n \"acc_norm_stderr\": 0.027325470966716312\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.7848101265822784,\n \"acc_stderr\": 0.026750826994676177,\n \"acc_norm\": 0.7848101265822784,\n \"acc_norm_stderr\": 0.026750826994676177\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6905829596412556,\n \"acc_stderr\": 0.03102441174057221,\n \"acc_norm\": 0.6905829596412556,\n \"acc_norm_stderr\": 0.03102441174057221\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.7633587786259542,\n \"acc_stderr\": 0.03727673575596913,\n \"acc_norm\": 0.7633587786259542,\n \"acc_norm_stderr\": 0.03727673575596913\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.7768595041322314,\n \"acc_stderr\": 0.03800754475228733,\n \"acc_norm\": 0.7768595041322314,\n \"acc_norm_stderr\": 0.03800754475228733\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7777777777777778,\n \"acc_stderr\": 0.040191074725573483,\n \"acc_norm\": 0.7777777777777778,\n \"acc_norm_stderr\": 0.040191074725573483\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.754601226993865,\n \"acc_stderr\": 0.03380939813943354,\n \"acc_norm\": 0.754601226993865,\n \"acc_norm_stderr\": 0.03380939813943354\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.45535714285714285,\n \"acc_stderr\": 0.047268355537191,\n \"acc_norm\": 0.45535714285714285,\n \"acc_norm_stderr\": 0.047268355537191\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7961165048543689,\n \"acc_stderr\": 0.0398913985953177,\n \"acc_norm\": 0.7961165048543689,\n \"acc_norm_stderr\": 0.0398913985953177\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8675213675213675,\n \"acc_stderr\": 0.022209309073165616,\n \"acc_norm\": 0.8675213675213675,\n \"acc_norm_stderr\": 0.022209309073165616\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.7,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.7,\n \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.822477650063857,\n \"acc_stderr\": 0.01366423099583483,\n \"acc_norm\": 0.822477650063857,\n 
\"acc_norm_stderr\": 0.01366423099583483\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.7312138728323699,\n \"acc_stderr\": 0.023868003262500104,\n \"acc_norm\": 0.7312138728323699,\n \"acc_norm_stderr\": 0.023868003262500104\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.4033519553072626,\n \"acc_stderr\": 0.01640712303219525,\n \"acc_norm\": 0.4033519553072626,\n \"acc_norm_stderr\": 0.01640712303219525\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.696078431372549,\n \"acc_stderr\": 0.026336613469046623,\n \"acc_norm\": 0.696078431372549,\n \"acc_norm_stderr\": 0.026336613469046623\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.7138263665594855,\n \"acc_stderr\": 0.02567025924218893,\n \"acc_norm\": 0.7138263665594855,\n \"acc_norm_stderr\": 0.02567025924218893\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.7376543209876543,\n \"acc_stderr\": 0.02447722285613511,\n \"acc_norm\": 0.7376543209876543,\n \"acc_norm_stderr\": 0.02447722285613511\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.4858156028368794,\n \"acc_stderr\": 0.02981549448368206,\n \"acc_norm\": 0.4858156028368794,\n \"acc_norm_stderr\": 0.02981549448368206\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.4589308996088657,\n \"acc_stderr\": 0.012727084826799802,\n \"acc_norm\": 0.4589308996088657,\n \"acc_norm_stderr\": 0.012727084826799802\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.6727941176470589,\n \"acc_stderr\": 0.028501452860396556,\n \"acc_norm\": 0.6727941176470589,\n \"acc_norm_stderr\": 0.028501452860396556\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6584967320261438,\n \"acc_stderr\": 0.019184639328092487,\n \"acc_norm\": 0.6584967320261438,\n \"acc_norm_stderr\": 0.019184639328092487\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6454545454545455,\n \"acc_stderr\": 0.045820048415054174,\n \"acc_norm\": 0.6454545454545455,\n \"acc_norm_stderr\": 0.045820048415054174\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.7020408163265306,\n \"acc_stderr\": 0.029279567411065677,\n \"acc_norm\": 0.7020408163265306,\n \"acc_norm_stderr\": 0.029279567411065677\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8407960199004975,\n \"acc_stderr\": 0.02587064676616914,\n \"acc_norm\": 0.8407960199004975,\n \"acc_norm_stderr\": 0.02587064676616914\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.84,\n \"acc_stderr\": 0.03684529491774709,\n \"acc_norm\": 0.84,\n \"acc_norm_stderr\": 0.03684529491774709\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5180722891566265,\n \"acc_stderr\": 0.03889951252827216,\n \"acc_norm\": 0.5180722891566265,\n \"acc_norm_stderr\": 0.03889951252827216\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8187134502923976,\n \"acc_stderr\": 0.02954774168764004,\n \"acc_norm\": 0.8187134502923976,\n \"acc_norm_stderr\": 0.02954774168764004\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.47613219094247244,\n \"mc1_stderr\": 0.017483547156961578,\n \"mc2\": 0.6350369384683723,\n \"mc2_stderr\": 0.01508168993616602\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.8050513022888713,\n \"acc_stderr\": 0.011134099415938278\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.5966641394996209,\n \"acc_stderr\": 0.013512654781814706\n }\n}\n```", "repo_url": "https://huggingface.co/perlthoughts/Chupacabra-8x7B-MoE", "leaderboard_url": 
"https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_12_16T21_20_16.522598", "path": ["**/details_harness|arc:challenge|25_2023-12-16T21-20-16.522598.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-12-16T21-20-16.522598.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_12_16T21_20_16.522598", "path": ["**/details_harness|gsm8k|5_2023-12-16T21-20-16.522598.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-12-16T21-20-16.522598.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_12_16T21_20_16.522598", "path": ["**/details_harness|hellaswag|10_2023-12-16T21-20-16.522598.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-12-16T21-20-16.522598.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_12_16T21_20_16.522598", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T21-20-16.522598.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-12-16T21-20-16.522598.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-12-16T21-20-16.522598.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T21-20-16.522598.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T21-20-16.522598.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-12-16T21-20-16.522598.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T21-20-16.522598.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T21-20-16.522598.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T21-20-16.522598.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T21-20-16.522598.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-12-16T21-20-16.522598.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-12-16T21-20-16.522598.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T21-20-16.522598.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-12-16T21-20-16.522598.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T21-20-16.522598.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T21-20-16.522598.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T21-20-16.522598.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-12-16T21-20-16.522598.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T21-20-16.522598.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T21-20-16.522598.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T21-20-16.522598.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T21-20-16.522598.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T21-20-16.522598.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T21-20-16.522598.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T21-20-16.522598.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T21-20-16.522598.parquet", 
"**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T21-20-16.522598.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T21-20-16.522598.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T21-20-16.522598.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T21-20-16.522598.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T21-20-16.522598.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T21-20-16.522598.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-12-16T21-20-16.522598.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T21-20-16.522598.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-12-16T21-20-16.522598.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T21-20-16.522598.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T21-20-16.522598.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T21-20-16.522598.parquet", "**/details_harness|hendrycksTest-management|5_2023-12-16T21-20-16.522598.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-12-16T21-20-16.522598.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T21-20-16.522598.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T21-20-16.522598.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T21-20-16.522598.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T21-20-16.522598.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-12-16T21-20-16.522598.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-12-16T21-20-16.522598.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-12-16T21-20-16.522598.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T21-20-16.522598.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-12-16T21-20-16.522598.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T21-20-16.522598.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T21-20-16.522598.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-12-16T21-20-16.522598.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-12-16T21-20-16.522598.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-12-16T21-20-16.522598.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T21-20-16.522598.parquet", "**/details_harness|hendrycksTest-virology|5_2023-12-16T21-20-16.522598.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-12-16T21-20-16.522598.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T21-20-16.522598.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-12-16T21-20-16.522598.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-12-16T21-20-16.522598.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T21-20-16.522598.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T21-20-16.522598.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-12-16T21-20-16.522598.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T21-20-16.522598.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T21-20-16.522598.parquet", 
"**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T21-20-16.522598.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T21-20-16.522598.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-12-16T21-20-16.522598.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-12-16T21-20-16.522598.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T21-20-16.522598.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-12-16T21-20-16.522598.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T21-20-16.522598.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T21-20-16.522598.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T21-20-16.522598.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-12-16T21-20-16.522598.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T21-20-16.522598.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T21-20-16.522598.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T21-20-16.522598.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T21-20-16.522598.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T21-20-16.522598.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T21-20-16.522598.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T21-20-16.522598.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T21-20-16.522598.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T21-20-16.522598.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T21-20-16.522598.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T21-20-16.522598.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T21-20-16.522598.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T21-20-16.522598.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T21-20-16.522598.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-12-16T21-20-16.522598.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T21-20-16.522598.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-12-16T21-20-16.522598.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T21-20-16.522598.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T21-20-16.522598.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T21-20-16.522598.parquet", "**/details_harness|hendrycksTest-management|5_2023-12-16T21-20-16.522598.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-12-16T21-20-16.522598.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T21-20-16.522598.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T21-20-16.522598.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T21-20-16.522598.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T21-20-16.522598.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-12-16T21-20-16.522598.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-12-16T21-20-16.522598.parquet", 
"**/details_harness|hendrycksTest-prehistory|5_2023-12-16T21-20-16.522598.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T21-20-16.522598.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-12-16T21-20-16.522598.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T21-20-16.522598.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T21-20-16.522598.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-12-16T21-20-16.522598.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-12-16T21-20-16.522598.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-12-16T21-20-16.522598.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T21-20-16.522598.parquet", "**/details_harness|hendrycksTest-virology|5_2023-12-16T21-20-16.522598.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-12-16T21-20-16.522598.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_12_16T21_20_16.522598", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T21-20-16.522598.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T21-20-16.522598.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_12_16T21_20_16.522598", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-12-16T21-20-16.522598.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-12-16T21-20-16.522598.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_12_16T21_20_16.522598", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-12-16T21-20-16.522598.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-12-16T21-20-16.522598.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_12_16T21_20_16.522598", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T21-20-16.522598.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T21-20-16.522598.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_12_16T21_20_16.522598", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T21-20-16.522598.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T21-20-16.522598.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_12_16T21_20_16.522598", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-12-16T21-20-16.522598.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-12-16T21-20-16.522598.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_12_16T21_20_16.522598", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T21-20-16.522598.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T21-20-16.522598.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_12_16T21_20_16.522598", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T21-20-16.522598.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T21-20-16.522598.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_12_16T21_20_16.522598", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T21-20-16.522598.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T21-20-16.522598.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_12_16T21_20_16.522598", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T21-20-16.522598.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T21-20-16.522598.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_12_16T21_20_16.522598", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-12-16T21-20-16.522598.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-12-16T21-20-16.522598.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_12_16T21_20_16.522598", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-12-16T21-20-16.522598.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-12-16T21-20-16.522598.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_12_16T21_20_16.522598", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T21-20-16.522598.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T21-20-16.522598.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_12_16T21_20_16.522598", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-12-16T21-20-16.522598.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-12-16T21-20-16.522598.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_12_16T21_20_16.522598", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T21-20-16.522598.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T21-20-16.522598.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_12_16T21_20_16.522598", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T21-20-16.522598.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T21-20-16.522598.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_12_16T21_20_16.522598", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T21-20-16.522598.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T21-20-16.522598.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_12_16T21_20_16.522598", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-12-16T21-20-16.522598.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-12-16T21-20-16.522598.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_12_16T21_20_16.522598", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T21-20-16.522598.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T21-20-16.522598.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_12_16T21_20_16.522598", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T21-20-16.522598.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T21-20-16.522598.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_12_16T21_20_16.522598", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T21-20-16.522598.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T21-20-16.522598.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_12_16T21_20_16.522598", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T21-20-16.522598.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T21-20-16.522598.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_12_16T21_20_16.522598", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T21-20-16.522598.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T21-20-16.522598.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_12_16T21_20_16.522598", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T21-20-16.522598.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T21-20-16.522598.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_12_16T21_20_16.522598", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T21-20-16.522598.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T21-20-16.522598.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_12_16T21_20_16.522598", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T21-20-16.522598.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T21-20-16.522598.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_12_16T21_20_16.522598", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T21-20-16.522598.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T21-20-16.522598.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_12_16T21_20_16.522598", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T21-20-16.522598.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T21-20-16.522598.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_12_16T21_20_16.522598", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T21-20-16.522598.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T21-20-16.522598.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_12_16T21_20_16.522598", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T21-20-16.522598.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T21-20-16.522598.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_12_16T21_20_16.522598", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T21-20-16.522598.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T21-20-16.522598.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_12_16T21_20_16.522598", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T21-20-16.522598.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T21-20-16.522598.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_12_16T21_20_16.522598", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-12-16T21-20-16.522598.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-12-16T21-20-16.522598.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_12_16T21_20_16.522598", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T21-20-16.522598.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T21-20-16.522598.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_12_16T21_20_16.522598", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-12-16T21-20-16.522598.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-12-16T21-20-16.522598.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_12_16T21_20_16.522598", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T21-20-16.522598.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T21-20-16.522598.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_12_16T21_20_16.522598", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T21-20-16.522598.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T21-20-16.522598.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_12_16T21_20_16.522598", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T21-20-16.522598.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T21-20-16.522598.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_12_16T21_20_16.522598", "path": ["**/details_harness|hendrycksTest-management|5_2023-12-16T21-20-16.522598.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-12-16T21-20-16.522598.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_12_16T21_20_16.522598", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-12-16T21-20-16.522598.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-12-16T21-20-16.522598.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_12_16T21_20_16.522598", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T21-20-16.522598.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T21-20-16.522598.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_12_16T21_20_16.522598", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T21-20-16.522598.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T21-20-16.522598.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_12_16T21_20_16.522598", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T21-20-16.522598.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T21-20-16.522598.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_12_16T21_20_16.522598", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T21-20-16.522598.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T21-20-16.522598.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_12_16T21_20_16.522598", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-12-16T21-20-16.522598.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-12-16T21-20-16.522598.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_12_16T21_20_16.522598", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-12-16T21-20-16.522598.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-12-16T21-20-16.522598.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_12_16T21_20_16.522598", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-12-16T21-20-16.522598.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-12-16T21-20-16.522598.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_12_16T21_20_16.522598", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T21-20-16.522598.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T21-20-16.522598.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_12_16T21_20_16.522598", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-12-16T21-20-16.522598.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2023-12-16T21-20-16.522598.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_12_16T21_20_16.522598", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T21-20-16.522598.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T21-20-16.522598.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_12_16T21_20_16.522598", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T21-20-16.522598.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T21-20-16.522598.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_12_16T21_20_16.522598", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-12-16T21-20-16.522598.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-12-16T21-20-16.522598.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_12_16T21_20_16.522598", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-12-16T21-20-16.522598.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-12-16T21-20-16.522598.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_12_16T21_20_16.522598", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-12-16T21-20-16.522598.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-12-16T21-20-16.522598.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_12_16T21_20_16.522598", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T21-20-16.522598.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T21-20-16.522598.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_12_16T21_20_16.522598", "path": ["**/details_harness|hendrycksTest-virology|5_2023-12-16T21-20-16.522598.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-12-16T21-20-16.522598.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_12_16T21_20_16.522598", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-12-16T21-20-16.522598.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-12-16T21-20-16.522598.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_12_16T21_20_16.522598", "path": ["**/details_harness|truthfulqa:mc|0_2023-12-16T21-20-16.522598.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-12-16T21-20-16.522598.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_12_16T21_20_16.522598", "path": ["**/details_harness|winogrande|5_2023-12-16T21-20-16.522598.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-12-16T21-20-16.522598.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_12_16T21_20_16.522598", "path": ["results_2023-12-16T21-20-16.522598.parquet"]}, {"split": "latest", "path": 
["results_2023-12-16T21-20-16.522598.parquet"]}]}]}
2023-12-16T21:23:53+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of perlthoughts/Chupacabra-8x7B-MoE Dataset automatically created during the evaluation run of model perlthoughts/Chupacabra-8x7B-MoE on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-12-16T21:20:16.522598 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
[ "# Dataset Card for Evaluation run of perlthoughts/Chupacabra-8x7B-MoE\n\n\n\nDataset automatically created during the evaluation run of model perlthoughts/Chupacabra-8x7B-MoE on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-12-16T21:20:16.522598(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of perlthoughts/Chupacabra-8x7B-MoE\n\n\n\nDataset automatically created during the evaluation run of model perlthoughts/Chupacabra-8x7B-MoE on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-12-16T21:20:16.522598(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 193, 67, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of perlthoughts/Chupacabra-8x7B-MoE\n\n\n\nDataset automatically created during the evaluation run of model perlthoughts/Chupacabra-8x7B-MoE on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-12-16T21:20:16.522598(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]## Dataset Card Authors [optional]" ]
dbded1c5b7363dab13c746e0b3602476b48d946a
# Dataset Card for Evaluation run of Undi95/Mixtral-8x7B-MoE-RP-Story <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [Undi95/Mixtral-8x7B-MoE-RP-Story](https://huggingface.co/Undi95/Mixtral-8x7B-MoE-RP-Story) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_Undi95__Mixtral-8x7B-MoE-RP-Story", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-12-16T21:32:27.266201](https://huggingface.co/datasets/open-llm-leaderboard/details_Undi95__Mixtral-8x7B-MoE-RP-Story/blob/main/results_2023-12-16T21-32-27.266201.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.43068446823982826, "acc_stderr": 0.03444996506735285, "acc_norm": 0.43640169503643583, "acc_norm_stderr": 0.03524813638857257, "mc1": 0.26438188494492043, "mc1_stderr": 0.015438211119522514, "mc2": 0.41531240642156975, "mc2_stderr": 0.01492327563743382 }, "harness|arc:challenge|25": { "acc": 0.46501706484641636, "acc_stderr": 0.014575583922019665, "acc_norm": 0.515358361774744, "acc_norm_stderr": 0.014604496129394904 }, "harness|hellaswag|10": { "acc": 0.5017924716191994, "acc_stderr": 0.004989749347461088, "acc_norm": 0.6999601672973511, "acc_norm_stderr": 0.004573383672159088 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.24, "acc_stderr": 0.042923469599092816, "acc_norm": 0.24, "acc_norm_stderr": 0.042923469599092816 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.43703703703703706, "acc_stderr": 0.04284958639753399, "acc_norm": 0.43703703703703706, "acc_norm_stderr": 0.04284958639753399 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.4934210526315789, "acc_stderr": 0.040685900502249704, "acc_norm": 0.4934210526315789, "acc_norm_stderr": 0.040685900502249704 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.39, "acc_stderr": 0.04902071300001974, "acc_norm": 0.39, "acc_norm_stderr": 0.04902071300001974 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.41509433962264153, "acc_stderr": 0.030325945789286105, "acc_norm": 0.41509433962264153, "acc_norm_stderr": 0.030325945789286105 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.4513888888888889, "acc_stderr": 0.041614023984032786, "acc_norm": 0.4513888888888889, "acc_norm_stderr": 0.041614023984032786 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.29, "acc_stderr": 0.04560480215720684, "acc_norm": 0.29, "acc_norm_stderr": 0.04560480215720684 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.36, "acc_stderr": 0.04824181513244218, "acc_norm": 0.36, "acc_norm_stderr": 0.04824181513244218 }, 
"harness|hendrycksTest-college_mathematics|5": { "acc": 0.34, "acc_stderr": 0.047609522856952344, "acc_norm": 0.34, "acc_norm_stderr": 0.047609522856952344 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.3988439306358382, "acc_stderr": 0.037336266553835096, "acc_norm": 0.3988439306358382, "acc_norm_stderr": 0.037336266553835096 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.24509803921568626, "acc_stderr": 0.042801058373643966, "acc_norm": 0.24509803921568626, "acc_norm_stderr": 0.042801058373643966 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.54, "acc_stderr": 0.05009082659620333, "acc_norm": 0.54, "acc_norm_stderr": 0.05009082659620333 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.3574468085106383, "acc_stderr": 0.03132941789476425, "acc_norm": 0.3574468085106383, "acc_norm_stderr": 0.03132941789476425 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.3508771929824561, "acc_stderr": 0.044895393502707, "acc_norm": 0.3508771929824561, "acc_norm_stderr": 0.044895393502707 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.43448275862068964, "acc_stderr": 0.04130740879555497, "acc_norm": 0.43448275862068964, "acc_norm_stderr": 0.04130740879555497 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.2777777777777778, "acc_stderr": 0.023068188848261114, "acc_norm": 0.2777777777777778, "acc_norm_stderr": 0.023068188848261114 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.23809523809523808, "acc_stderr": 0.03809523809523812, "acc_norm": 0.23809523809523808, "acc_norm_stderr": 0.03809523809523812 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.26, "acc_stderr": 0.04408440022768078, "acc_norm": 0.26, "acc_norm_stderr": 0.04408440022768078 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.41935483870967744, "acc_stderr": 0.028071588901091838, "acc_norm": 0.41935483870967744, "acc_norm_stderr": 0.028071588901091838 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.2660098522167488, "acc_stderr": 0.03108982600293753, "acc_norm": 0.2660098522167488, "acc_norm_stderr": 0.03108982600293753 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.5, "acc_stderr": 0.050251890762960605, "acc_norm": 0.5, "acc_norm_stderr": 0.050251890762960605 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.36363636363636365, "acc_stderr": 0.03756335775187896, "acc_norm": 0.36363636363636365, "acc_norm_stderr": 0.03756335775187896 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.5151515151515151, "acc_stderr": 0.03560716516531061, "acc_norm": 0.5151515151515151, "acc_norm_stderr": 0.03560716516531061 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.6424870466321243, "acc_stderr": 0.034588160421810114, "acc_norm": 0.6424870466321243, "acc_norm_stderr": 0.034588160421810114 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.44358974358974357, "acc_stderr": 0.025189149894764205, "acc_norm": 0.44358974358974357, "acc_norm_stderr": 0.025189149894764205 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.2740740740740741, "acc_stderr": 0.027195934804085622, "acc_norm": 0.2740740740740741, "acc_norm_stderr": 0.027195934804085622 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.4327731092436975, "acc_stderr": 0.03218358107742613, "acc_norm": 0.4327731092436975, "acc_norm_stderr": 0.03218358107742613 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.31788079470198677, "acc_stderr": 
0.03802039760107903, "acc_norm": 0.31788079470198677, "acc_norm_stderr": 0.03802039760107903 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.5045871559633027, "acc_stderr": 0.021436420955529414, "acc_norm": 0.5045871559633027, "acc_norm_stderr": 0.021436420955529414 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.3472222222222222, "acc_stderr": 0.032468872436376486, "acc_norm": 0.3472222222222222, "acc_norm_stderr": 0.032468872436376486 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.4166666666666667, "acc_stderr": 0.0346022832723917, "acc_norm": 0.4166666666666667, "acc_norm_stderr": 0.0346022832723917 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.46835443037974683, "acc_stderr": 0.03248197400511075, "acc_norm": 0.46835443037974683, "acc_norm_stderr": 0.03248197400511075 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.45739910313901344, "acc_stderr": 0.033435777055830646, "acc_norm": 0.45739910313901344, "acc_norm_stderr": 0.033435777055830646 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.5267175572519084, "acc_stderr": 0.04379024936553893, "acc_norm": 0.5267175572519084, "acc_norm_stderr": 0.04379024936553893 }, "harness|hendrycksTest-international_law|5": { "acc": 0.6528925619834711, "acc_stderr": 0.043457245702925335, "acc_norm": 0.6528925619834711, "acc_norm_stderr": 0.043457245702925335 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.42592592592592593, "acc_stderr": 0.0478034362693679, "acc_norm": 0.42592592592592593, "acc_norm_stderr": 0.0478034362693679 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.4601226993865031, "acc_stderr": 0.03915857291436972, "acc_norm": 0.4601226993865031, "acc_norm_stderr": 0.03915857291436972 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.4732142857142857, "acc_stderr": 0.047389751192741546, "acc_norm": 0.4732142857142857, "acc_norm_stderr": 0.047389751192741546 }, "harness|hendrycksTest-management|5": { "acc": 0.5339805825242718, "acc_stderr": 0.0493929144727348, "acc_norm": 0.5339805825242718, "acc_norm_stderr": 0.0493929144727348 }, "harness|hendrycksTest-marketing|5": { "acc": 0.5726495726495726, "acc_stderr": 0.03240847393516327, "acc_norm": 0.5726495726495726, "acc_norm_stderr": 0.03240847393516327 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.43, "acc_stderr": 0.04975698519562428, "acc_norm": 0.43, "acc_norm_stderr": 0.04975698519562428 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.5312899106002554, "acc_stderr": 0.01784491809046855, "acc_norm": 0.5312899106002554, "acc_norm_stderr": 0.01784491809046855 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.43641618497109824, "acc_stderr": 0.026700545424943687, "acc_norm": 0.43641618497109824, "acc_norm_stderr": 0.026700545424943687 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.28044692737430166, "acc_stderr": 0.015024083883322893, "acc_norm": 0.28044692737430166, "acc_norm_stderr": 0.015024083883322893 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.45751633986928103, "acc_stderr": 0.02852638345214263, "acc_norm": 0.45751633986928103, "acc_norm_stderr": 0.02852638345214263 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.4533762057877814, "acc_stderr": 0.02827435985489424, "acc_norm": 0.4533762057877814, "acc_norm_stderr": 0.02827435985489424 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.49691358024691357, "acc_stderr": 0.027820214158594377, "acc_norm": 0.49691358024691357, "acc_norm_stderr": 0.027820214158594377 }, 
"harness|hendrycksTest-professional_accounting|5": { "acc": 0.36879432624113473, "acc_stderr": 0.028782227561347257, "acc_norm": 0.36879432624113473, "acc_norm_stderr": 0.028782227561347257 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.31290743155149936, "acc_stderr": 0.011842529823062995, "acc_norm": 0.31290743155149936, "acc_norm_stderr": 0.011842529823062995 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.45588235294117646, "acc_stderr": 0.030254372573976694, "acc_norm": 0.45588235294117646, "acc_norm_stderr": 0.030254372573976694 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.4117647058823529, "acc_stderr": 0.01991037746310593, "acc_norm": 0.4117647058823529, "acc_norm_stderr": 0.01991037746310593 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.509090909090909, "acc_stderr": 0.04788339768702861, "acc_norm": 0.509090909090909, "acc_norm_stderr": 0.04788339768702861 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.5918367346938775, "acc_stderr": 0.03146465712827423, "acc_norm": 0.5918367346938775, "acc_norm_stderr": 0.03146465712827423 }, "harness|hendrycksTest-sociology|5": { "acc": 0.572139303482587, "acc_stderr": 0.03498541988407795, "acc_norm": 0.572139303482587, "acc_norm_stderr": 0.03498541988407795 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.69, "acc_stderr": 0.04648231987117316, "acc_norm": 0.69, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-virology|5": { "acc": 0.39156626506024095, "acc_stderr": 0.03799857454479636, "acc_norm": 0.39156626506024095, "acc_norm_stderr": 0.03799857454479636 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.6081871345029239, "acc_stderr": 0.03743979825926399, "acc_norm": 0.6081871345029239, "acc_norm_stderr": 0.03743979825926399 }, "harness|truthfulqa:mc|0": { "mc1": 0.26438188494492043, "mc1_stderr": 0.015438211119522514, "mc2": 0.41531240642156975, "mc2_stderr": 0.01492327563743382 }, "harness|winogrande|5": { "acc": 0.6732438831886346, "acc_stderr": 0.013181997302131366 }, "harness|gsm8k|5": { "acc": 0.09931766489764973, "acc_stderr": 0.008238371412683963 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. 
news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
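The per-task scores in the "Latest results" block above are keyed as `harness|<task>|<n_shot>`. As a hedged illustration only (the helper below is hypothetical and not part of this dataset or of the evaluation harness), the MMLU ("hendrycksTest") accuracies can be pulled out of such a dict like this:

```python
# Hypothetical helper (illustration only): extract per-subject accuracy for the
# MMLU ("hendrycksTest") tasks from a results dict shaped like the JSON above,
# whose keys look like "harness|hendrycksTest-virology|5".
def mmlu_accuracies(results: dict) -> dict:
    return {
        key.split("|")[1].removeprefix("hendrycksTest-"): scores["acc"]
        for key, scores in results.items()
        if key.startswith("harness|hendrycksTest-")
    }

# For the run shown above this would give, e.g., {"virology": 0.39156626506024095, ...}.
```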
open-llm-leaderboard/details_Undi95__Mixtral-8x7B-MoE-RP-Story
[ "region:us" ]
2023-12-16T21:35:22+00:00
{"pretty_name": "Evaluation run of Undi95/Mixtral-8x7B-MoE-RP-Story", "dataset_summary": "Dataset automatically created during the evaluation run of model [Undi95/Mixtral-8x7B-MoE-RP-Story](https://huggingface.co/Undi95/Mixtral-8x7B-MoE-RP-Story) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_Undi95__Mixtral-8x7B-MoE-RP-Story\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-12-16T21:32:27.266201](https://huggingface.co/datasets/open-llm-leaderboard/details_Undi95__Mixtral-8x7B-MoE-RP-Story/blob/main/results_2023-12-16T21-32-27.266201.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.43068446823982826,\n \"acc_stderr\": 0.03444996506735285,\n \"acc_norm\": 0.43640169503643583,\n \"acc_norm_stderr\": 0.03524813638857257,\n \"mc1\": 0.26438188494492043,\n \"mc1_stderr\": 0.015438211119522514,\n \"mc2\": 0.41531240642156975,\n \"mc2_stderr\": 0.01492327563743382\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.46501706484641636,\n \"acc_stderr\": 0.014575583922019665,\n \"acc_norm\": 0.515358361774744,\n \"acc_norm_stderr\": 0.014604496129394904\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.5017924716191994,\n \"acc_stderr\": 0.004989749347461088,\n \"acc_norm\": 0.6999601672973511,\n \"acc_norm_stderr\": 0.004573383672159088\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.24,\n \"acc_stderr\": 0.042923469599092816,\n \"acc_norm\": 0.24,\n \"acc_norm_stderr\": 0.042923469599092816\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.43703703703703706,\n \"acc_stderr\": 0.04284958639753399,\n \"acc_norm\": 0.43703703703703706,\n \"acc_norm_stderr\": 0.04284958639753399\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.4934210526315789,\n \"acc_stderr\": 0.040685900502249704,\n \"acc_norm\": 0.4934210526315789,\n \"acc_norm_stderr\": 0.040685900502249704\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.39,\n \"acc_stderr\": 0.04902071300001974,\n \"acc_norm\": 0.39,\n \"acc_norm_stderr\": 0.04902071300001974\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.41509433962264153,\n \"acc_stderr\": 0.030325945789286105,\n \"acc_norm\": 0.41509433962264153,\n \"acc_norm_stderr\": 0.030325945789286105\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.4513888888888889,\n \"acc_stderr\": 0.041614023984032786,\n \"acc_norm\": 0.4513888888888889,\n \"acc_norm_stderr\": 0.041614023984032786\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.29,\n \"acc_stderr\": 
0.04560480215720684,\n \"acc_norm\": 0.29,\n \"acc_norm_stderr\": 0.04560480215720684\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.36,\n \"acc_stderr\": 0.04824181513244218,\n \"acc_norm\": 0.36,\n \"acc_norm_stderr\": 0.04824181513244218\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.34,\n \"acc_stderr\": 0.047609522856952344,\n \"acc_norm\": 0.34,\n \"acc_norm_stderr\": 0.047609522856952344\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.3988439306358382,\n \"acc_stderr\": 0.037336266553835096,\n \"acc_norm\": 0.3988439306358382,\n \"acc_norm_stderr\": 0.037336266553835096\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.24509803921568626,\n \"acc_stderr\": 0.042801058373643966,\n \"acc_norm\": 0.24509803921568626,\n \"acc_norm_stderr\": 0.042801058373643966\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.54,\n \"acc_stderr\": 0.05009082659620333,\n \"acc_norm\": 0.54,\n \"acc_norm_stderr\": 0.05009082659620333\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.3574468085106383,\n \"acc_stderr\": 0.03132941789476425,\n \"acc_norm\": 0.3574468085106383,\n \"acc_norm_stderr\": 0.03132941789476425\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.3508771929824561,\n \"acc_stderr\": 0.044895393502707,\n \"acc_norm\": 0.3508771929824561,\n \"acc_norm_stderr\": 0.044895393502707\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.43448275862068964,\n \"acc_stderr\": 0.04130740879555497,\n \"acc_norm\": 0.43448275862068964,\n \"acc_norm_stderr\": 0.04130740879555497\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.2777777777777778,\n \"acc_stderr\": 0.023068188848261114,\n \"acc_norm\": 0.2777777777777778,\n \"acc_norm_stderr\": 0.023068188848261114\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.23809523809523808,\n \"acc_stderr\": 0.03809523809523812,\n \"acc_norm\": 0.23809523809523808,\n \"acc_norm_stderr\": 0.03809523809523812\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.26,\n \"acc_stderr\": 0.04408440022768078,\n \"acc_norm\": 0.26,\n \"acc_norm_stderr\": 0.04408440022768078\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.41935483870967744,\n \"acc_stderr\": 0.028071588901091838,\n \"acc_norm\": 0.41935483870967744,\n \"acc_norm_stderr\": 0.028071588901091838\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.2660098522167488,\n \"acc_stderr\": 0.03108982600293753,\n \"acc_norm\": 0.2660098522167488,\n \"acc_norm_stderr\": 0.03108982600293753\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.5,\n \"acc_stderr\": 0.050251890762960605,\n \"acc_norm\": 0.5,\n \"acc_norm_stderr\": 0.050251890762960605\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.36363636363636365,\n \"acc_stderr\": 0.03756335775187896,\n \"acc_norm\": 0.36363636363636365,\n \"acc_norm_stderr\": 0.03756335775187896\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.5151515151515151,\n \"acc_stderr\": 0.03560716516531061,\n \"acc_norm\": 0.5151515151515151,\n \"acc_norm_stderr\": 0.03560716516531061\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.6424870466321243,\n \"acc_stderr\": 0.034588160421810114,\n \"acc_norm\": 0.6424870466321243,\n \"acc_norm_stderr\": 0.034588160421810114\n },\n 
\"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.44358974358974357,\n \"acc_stderr\": 0.025189149894764205,\n \"acc_norm\": 0.44358974358974357,\n \"acc_norm_stderr\": 0.025189149894764205\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.2740740740740741,\n \"acc_stderr\": 0.027195934804085622,\n \"acc_norm\": 0.2740740740740741,\n \"acc_norm_stderr\": 0.027195934804085622\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.4327731092436975,\n \"acc_stderr\": 0.03218358107742613,\n \"acc_norm\": 0.4327731092436975,\n \"acc_norm_stderr\": 0.03218358107742613\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.31788079470198677,\n \"acc_stderr\": 0.03802039760107903,\n \"acc_norm\": 0.31788079470198677,\n \"acc_norm_stderr\": 0.03802039760107903\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.5045871559633027,\n \"acc_stderr\": 0.021436420955529414,\n \"acc_norm\": 0.5045871559633027,\n \"acc_norm_stderr\": 0.021436420955529414\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.3472222222222222,\n \"acc_stderr\": 0.032468872436376486,\n \"acc_norm\": 0.3472222222222222,\n \"acc_norm_stderr\": 0.032468872436376486\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.4166666666666667,\n \"acc_stderr\": 0.0346022832723917,\n \"acc_norm\": 0.4166666666666667,\n \"acc_norm_stderr\": 0.0346022832723917\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.46835443037974683,\n \"acc_stderr\": 0.03248197400511075,\n \"acc_norm\": 0.46835443037974683,\n \"acc_norm_stderr\": 0.03248197400511075\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.45739910313901344,\n \"acc_stderr\": 0.033435777055830646,\n \"acc_norm\": 0.45739910313901344,\n \"acc_norm_stderr\": 0.033435777055830646\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.5267175572519084,\n \"acc_stderr\": 0.04379024936553893,\n \"acc_norm\": 0.5267175572519084,\n \"acc_norm_stderr\": 0.04379024936553893\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.6528925619834711,\n \"acc_stderr\": 0.043457245702925335,\n \"acc_norm\": 0.6528925619834711,\n \"acc_norm_stderr\": 0.043457245702925335\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.42592592592592593,\n \"acc_stderr\": 0.0478034362693679,\n \"acc_norm\": 0.42592592592592593,\n \"acc_norm_stderr\": 0.0478034362693679\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.4601226993865031,\n \"acc_stderr\": 0.03915857291436972,\n \"acc_norm\": 0.4601226993865031,\n \"acc_norm_stderr\": 0.03915857291436972\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.4732142857142857,\n \"acc_stderr\": 0.047389751192741546,\n \"acc_norm\": 0.4732142857142857,\n \"acc_norm_stderr\": 0.047389751192741546\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.5339805825242718,\n \"acc_stderr\": 0.0493929144727348,\n \"acc_norm\": 0.5339805825242718,\n \"acc_norm_stderr\": 0.0493929144727348\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.5726495726495726,\n \"acc_stderr\": 0.03240847393516327,\n \"acc_norm\": 0.5726495726495726,\n \"acc_norm_stderr\": 0.03240847393516327\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.43,\n \"acc_stderr\": 0.04975698519562428,\n \"acc_norm\": 0.43,\n \"acc_norm_stderr\": 0.04975698519562428\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 
0.5312899106002554,\n \"acc_stderr\": 0.01784491809046855,\n \"acc_norm\": 0.5312899106002554,\n \"acc_norm_stderr\": 0.01784491809046855\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.43641618497109824,\n \"acc_stderr\": 0.026700545424943687,\n \"acc_norm\": 0.43641618497109824,\n \"acc_norm_stderr\": 0.026700545424943687\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.28044692737430166,\n \"acc_stderr\": 0.015024083883322893,\n \"acc_norm\": 0.28044692737430166,\n \"acc_norm_stderr\": 0.015024083883322893\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.45751633986928103,\n \"acc_stderr\": 0.02852638345214263,\n \"acc_norm\": 0.45751633986928103,\n \"acc_norm_stderr\": 0.02852638345214263\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.4533762057877814,\n \"acc_stderr\": 0.02827435985489424,\n \"acc_norm\": 0.4533762057877814,\n \"acc_norm_stderr\": 0.02827435985489424\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.49691358024691357,\n \"acc_stderr\": 0.027820214158594377,\n \"acc_norm\": 0.49691358024691357,\n \"acc_norm_stderr\": 0.027820214158594377\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.36879432624113473,\n \"acc_stderr\": 0.028782227561347257,\n \"acc_norm\": 0.36879432624113473,\n \"acc_norm_stderr\": 0.028782227561347257\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.31290743155149936,\n \"acc_stderr\": 0.011842529823062995,\n \"acc_norm\": 0.31290743155149936,\n \"acc_norm_stderr\": 0.011842529823062995\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.45588235294117646,\n \"acc_stderr\": 0.030254372573976694,\n \"acc_norm\": 0.45588235294117646,\n \"acc_norm_stderr\": 0.030254372573976694\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.4117647058823529,\n \"acc_stderr\": 0.01991037746310593,\n \"acc_norm\": 0.4117647058823529,\n \"acc_norm_stderr\": 0.01991037746310593\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.509090909090909,\n \"acc_stderr\": 0.04788339768702861,\n \"acc_norm\": 0.509090909090909,\n \"acc_norm_stderr\": 0.04788339768702861\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.5918367346938775,\n \"acc_stderr\": 0.03146465712827423,\n \"acc_norm\": 0.5918367346938775,\n \"acc_norm_stderr\": 0.03146465712827423\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.572139303482587,\n \"acc_stderr\": 0.03498541988407795,\n \"acc_norm\": 0.572139303482587,\n \"acc_norm_stderr\": 0.03498541988407795\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.69,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.69,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.39156626506024095,\n \"acc_stderr\": 0.03799857454479636,\n \"acc_norm\": 0.39156626506024095,\n \"acc_norm_stderr\": 0.03799857454479636\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.6081871345029239,\n \"acc_stderr\": 0.03743979825926399,\n \"acc_norm\": 0.6081871345029239,\n \"acc_norm_stderr\": 0.03743979825926399\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.26438188494492043,\n \"mc1_stderr\": 0.015438211119522514,\n \"mc2\": 0.41531240642156975,\n \"mc2_stderr\": 0.01492327563743382\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.6732438831886346,\n \"acc_stderr\": 0.013181997302131366\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.09931766489764973,\n \"acc_stderr\": 
0.008238371412683963\n }\n}\n```", "repo_url": "https://huggingface.co/Undi95/Mixtral-8x7B-MoE-RP-Story", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_12_16T21_32_27.266201", "path": ["**/details_harness|arc:challenge|25_2023-12-16T21-32-27.266201.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-12-16T21-32-27.266201.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_12_16T21_32_27.266201", "path": ["**/details_harness|gsm8k|5_2023-12-16T21-32-27.266201.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-12-16T21-32-27.266201.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_12_16T21_32_27.266201", "path": ["**/details_harness|hellaswag|10_2023-12-16T21-32-27.266201.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-12-16T21-32-27.266201.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_12_16T21_32_27.266201", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T21-32-27.266201.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-12-16T21-32-27.266201.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-12-16T21-32-27.266201.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T21-32-27.266201.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T21-32-27.266201.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-12-16T21-32-27.266201.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T21-32-27.266201.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T21-32-27.266201.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T21-32-27.266201.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T21-32-27.266201.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-12-16T21-32-27.266201.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-12-16T21-32-27.266201.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T21-32-27.266201.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-12-16T21-32-27.266201.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T21-32-27.266201.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T21-32-27.266201.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T21-32-27.266201.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-12-16T21-32-27.266201.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T21-32-27.266201.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T21-32-27.266201.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T21-32-27.266201.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T21-32-27.266201.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T21-32-27.266201.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T21-32-27.266201.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T21-32-27.266201.parquet", 
"**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T21-32-27.266201.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T21-32-27.266201.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T21-32-27.266201.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T21-32-27.266201.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T21-32-27.266201.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T21-32-27.266201.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T21-32-27.266201.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-12-16T21-32-27.266201.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T21-32-27.266201.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-12-16T21-32-27.266201.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T21-32-27.266201.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T21-32-27.266201.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T21-32-27.266201.parquet", "**/details_harness|hendrycksTest-management|5_2023-12-16T21-32-27.266201.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-12-16T21-32-27.266201.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T21-32-27.266201.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T21-32-27.266201.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T21-32-27.266201.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T21-32-27.266201.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-12-16T21-32-27.266201.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-12-16T21-32-27.266201.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-12-16T21-32-27.266201.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T21-32-27.266201.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-12-16T21-32-27.266201.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T21-32-27.266201.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T21-32-27.266201.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-12-16T21-32-27.266201.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-12-16T21-32-27.266201.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-12-16T21-32-27.266201.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T21-32-27.266201.parquet", "**/details_harness|hendrycksTest-virology|5_2023-12-16T21-32-27.266201.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-12-16T21-32-27.266201.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T21-32-27.266201.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-12-16T21-32-27.266201.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-12-16T21-32-27.266201.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T21-32-27.266201.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T21-32-27.266201.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-12-16T21-32-27.266201.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T21-32-27.266201.parquet", 
"**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T21-32-27.266201.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T21-32-27.266201.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T21-32-27.266201.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-12-16T21-32-27.266201.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-12-16T21-32-27.266201.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T21-32-27.266201.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-12-16T21-32-27.266201.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T21-32-27.266201.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T21-32-27.266201.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T21-32-27.266201.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-12-16T21-32-27.266201.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T21-32-27.266201.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T21-32-27.266201.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T21-32-27.266201.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T21-32-27.266201.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T21-32-27.266201.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T21-32-27.266201.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T21-32-27.266201.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T21-32-27.266201.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T21-32-27.266201.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T21-32-27.266201.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T21-32-27.266201.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T21-32-27.266201.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T21-32-27.266201.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T21-32-27.266201.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-12-16T21-32-27.266201.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T21-32-27.266201.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-12-16T21-32-27.266201.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T21-32-27.266201.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T21-32-27.266201.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T21-32-27.266201.parquet", "**/details_harness|hendrycksTest-management|5_2023-12-16T21-32-27.266201.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-12-16T21-32-27.266201.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T21-32-27.266201.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T21-32-27.266201.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T21-32-27.266201.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T21-32-27.266201.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-12-16T21-32-27.266201.parquet", 
"**/details_harness|hendrycksTest-philosophy|5_2023-12-16T21-32-27.266201.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-12-16T21-32-27.266201.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T21-32-27.266201.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-12-16T21-32-27.266201.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T21-32-27.266201.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T21-32-27.266201.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-12-16T21-32-27.266201.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-12-16T21-32-27.266201.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-12-16T21-32-27.266201.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T21-32-27.266201.parquet", "**/details_harness|hendrycksTest-virology|5_2023-12-16T21-32-27.266201.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-12-16T21-32-27.266201.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_12_16T21_32_27.266201", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T21-32-27.266201.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T21-32-27.266201.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_12_16T21_32_27.266201", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-12-16T21-32-27.266201.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-12-16T21-32-27.266201.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_12_16T21_32_27.266201", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-12-16T21-32-27.266201.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-12-16T21-32-27.266201.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_12_16T21_32_27.266201", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T21-32-27.266201.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T21-32-27.266201.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_12_16T21_32_27.266201", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T21-32-27.266201.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T21-32-27.266201.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_12_16T21_32_27.266201", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-12-16T21-32-27.266201.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-12-16T21-32-27.266201.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_12_16T21_32_27.266201", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T21-32-27.266201.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T21-32-27.266201.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_12_16T21_32_27.266201", "path": 
["**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T21-32-27.266201.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T21-32-27.266201.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_12_16T21_32_27.266201", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T21-32-27.266201.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T21-32-27.266201.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_12_16T21_32_27.266201", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T21-32-27.266201.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T21-32-27.266201.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_12_16T21_32_27.266201", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-12-16T21-32-27.266201.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-12-16T21-32-27.266201.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_12_16T21_32_27.266201", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-12-16T21-32-27.266201.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-12-16T21-32-27.266201.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_12_16T21_32_27.266201", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T21-32-27.266201.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T21-32-27.266201.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_12_16T21_32_27.266201", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-12-16T21-32-27.266201.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-12-16T21-32-27.266201.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_12_16T21_32_27.266201", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T21-32-27.266201.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T21-32-27.266201.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_12_16T21_32_27.266201", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T21-32-27.266201.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T21-32-27.266201.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_12_16T21_32_27.266201", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T21-32-27.266201.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T21-32-27.266201.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_12_16T21_32_27.266201", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-12-16T21-32-27.266201.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-global_facts|5_2023-12-16T21-32-27.266201.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_12_16T21_32_27.266201", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T21-32-27.266201.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T21-32-27.266201.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_12_16T21_32_27.266201", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T21-32-27.266201.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T21-32-27.266201.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_12_16T21_32_27.266201", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T21-32-27.266201.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T21-32-27.266201.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_12_16T21_32_27.266201", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T21-32-27.266201.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T21-32-27.266201.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_12_16T21_32_27.266201", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T21-32-27.266201.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T21-32-27.266201.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_12_16T21_32_27.266201", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T21-32-27.266201.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T21-32-27.266201.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_12_16T21_32_27.266201", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T21-32-27.266201.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T21-32-27.266201.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_12_16T21_32_27.266201", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T21-32-27.266201.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T21-32-27.266201.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_12_16T21_32_27.266201", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T21-32-27.266201.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T21-32-27.266201.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_12_16T21_32_27.266201", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T21-32-27.266201.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T21-32-27.266201.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_12_16T21_32_27.266201", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T21-32-27.266201.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T21-32-27.266201.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_12_16T21_32_27.266201", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T21-32-27.266201.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T21-32-27.266201.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_12_16T21_32_27.266201", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T21-32-27.266201.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T21-32-27.266201.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_12_16T21_32_27.266201", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T21-32-27.266201.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T21-32-27.266201.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_12_16T21_32_27.266201", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-12-16T21-32-27.266201.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-12-16T21-32-27.266201.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_12_16T21_32_27.266201", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T21-32-27.266201.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T21-32-27.266201.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_12_16T21_32_27.266201", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-12-16T21-32-27.266201.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-12-16T21-32-27.266201.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_12_16T21_32_27.266201", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T21-32-27.266201.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T21-32-27.266201.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_12_16T21_32_27.266201", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T21-32-27.266201.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T21-32-27.266201.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_12_16T21_32_27.266201", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T21-32-27.266201.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T21-32-27.266201.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_12_16T21_32_27.266201", "path": ["**/details_harness|hendrycksTest-management|5_2023-12-16T21-32-27.266201.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-12-16T21-32-27.266201.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_12_16T21_32_27.266201", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-12-16T21-32-27.266201.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-12-16T21-32-27.266201.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_12_16T21_32_27.266201", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T21-32-27.266201.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T21-32-27.266201.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_12_16T21_32_27.266201", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T21-32-27.266201.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T21-32-27.266201.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_12_16T21_32_27.266201", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T21-32-27.266201.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T21-32-27.266201.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_12_16T21_32_27.266201", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T21-32-27.266201.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T21-32-27.266201.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_12_16T21_32_27.266201", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-12-16T21-32-27.266201.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-12-16T21-32-27.266201.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_12_16T21_32_27.266201", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-12-16T21-32-27.266201.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-12-16T21-32-27.266201.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_12_16T21_32_27.266201", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-12-16T21-32-27.266201.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-12-16T21-32-27.266201.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_12_16T21_32_27.266201", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T21-32-27.266201.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T21-32-27.266201.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_12_16T21_32_27.266201", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2023-12-16T21-32-27.266201.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-12-16T21-32-27.266201.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_12_16T21_32_27.266201", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T21-32-27.266201.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T21-32-27.266201.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_12_16T21_32_27.266201", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T21-32-27.266201.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T21-32-27.266201.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_12_16T21_32_27.266201", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-12-16T21-32-27.266201.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-12-16T21-32-27.266201.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_12_16T21_32_27.266201", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-12-16T21-32-27.266201.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-12-16T21-32-27.266201.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_12_16T21_32_27.266201", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-12-16T21-32-27.266201.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-12-16T21-32-27.266201.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_12_16T21_32_27.266201", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T21-32-27.266201.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T21-32-27.266201.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_12_16T21_32_27.266201", "path": ["**/details_harness|hendrycksTest-virology|5_2023-12-16T21-32-27.266201.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-12-16T21-32-27.266201.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_12_16T21_32_27.266201", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-12-16T21-32-27.266201.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-12-16T21-32-27.266201.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_12_16T21_32_27.266201", "path": ["**/details_harness|truthfulqa:mc|0_2023-12-16T21-32-27.266201.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-12-16T21-32-27.266201.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_12_16T21_32_27.266201", "path": ["**/details_harness|winogrande|5_2023-12-16T21-32-27.266201.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-12-16T21-32-27.266201.parquet"]}]}, {"config_name": "results", "data_files": [{"split": 
"2023_12_16T21_32_27.266201", "path": ["results_2023-12-16T21-32-27.266201.parquet"]}, {"split": "latest", "path": ["results_2023-12-16T21-32-27.266201.parquet"]}]}]}
2023-12-16T21:36:05+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of Undi95/Mixtral-8x7B-MoE-RP-Story Dataset automatically created during the evaluation run of model Undi95/Mixtral-8x7B-MoE-RP-Story on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-12-16T21:32:27.266201 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
[ "# Dataset Card for Evaluation run of Undi95/Mixtral-8x7B-MoE-RP-Story\n\n\n\nDataset automatically created during the evaluation run of model Undi95/Mixtral-8x7B-MoE-RP-Story on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-12-16T21:32:27.266201(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of Undi95/Mixtral-8x7B-MoE-RP-Story\n\n\n\nDataset automatically created during the evaluation run of model Undi95/Mixtral-8x7B-MoE-RP-Story on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-12-16T21:32:27.266201(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 199, 67, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of Undi95/Mixtral-8x7B-MoE-RP-Story\n\n\n\nDataset automatically created during the evaluation run of model Undi95/Mixtral-8x7B-MoE-RP-Story on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-12-16T21:32:27.266201(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]" ]
9afda3476e0209a5bf566cb3cf9230e63be14edc
# Dataset Card for Evaluation run of perlthoughts/Falkor-8x7B-MoE <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [perlthoughts/Falkor-8x7B-MoE](https://huggingface.co/perlthoughts/Falkor-8x7B-MoE) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_perlthoughts__Falkor-8x7B-MoE", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-12-16T21:48:58.361135](https://huggingface.co/datasets/open-llm-leaderboard/details_perlthoughts__Falkor-8x7B-MoE/blob/main/results_2023-12-16T21-48-58.361135.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.6436792323952398, "acc_stderr": 0.0322025695700015, "acc_norm": 0.6451930910788192, "acc_norm_stderr": 0.03285268737187084, "mc1": 0.36474908200734396, "mc1_stderr": 0.01685096106172012, "mc2": 0.5350238552317648, "mc2_stderr": 0.015383683041808177 }, "harness|arc:challenge|25": { "acc": 0.6356655290102389, "acc_stderr": 0.014063260279882419, "acc_norm": 0.6629692832764505, "acc_norm_stderr": 0.013813476652902276 }, "harness|hellaswag|10": { "acc": 0.6665006970722963, "acc_stderr": 0.004704996294145036, "acc_norm": 0.8503286197968533, "acc_norm_stderr": 0.0035601991854865575 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.36, "acc_stderr": 0.04824181513244218, "acc_norm": 0.36, "acc_norm_stderr": 0.04824181513244218 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.6370370370370371, "acc_stderr": 0.04153948404742398, "acc_norm": 0.6370370370370371, "acc_norm_stderr": 0.04153948404742398 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.6973684210526315, "acc_stderr": 0.037385206761196686, "acc_norm": 0.6973684210526315, "acc_norm_stderr": 0.037385206761196686 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.56, "acc_stderr": 0.04988876515698589, "acc_norm": 0.56, "acc_norm_stderr": 0.04988876515698589 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.7358490566037735, "acc_stderr": 0.027134291628741702, "acc_norm": 0.7358490566037735, "acc_norm_stderr": 0.027134291628741702 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.7847222222222222, "acc_stderr": 0.03437079344106135, "acc_norm": 0.7847222222222222, "acc_norm_stderr": 0.03437079344106135 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.45, "acc_stderr": 0.05, "acc_norm": 0.45, "acc_norm_stderr": 0.05 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.52, "acc_stderr": 0.050211673156867795, "acc_norm": 0.52, "acc_norm_stderr": 0.050211673156867795 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.3,
"acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.6705202312138728, "acc_stderr": 0.03583901754736412, "acc_norm": 0.6705202312138728, "acc_norm_stderr": 0.03583901754736412 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.37254901960784315, "acc_stderr": 0.048108401480826346, "acc_norm": 0.37254901960784315, "acc_norm_stderr": 0.048108401480826346 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.75, "acc_stderr": 0.04351941398892446, "acc_norm": 0.75, "acc_norm_stderr": 0.04351941398892446 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.5446808510638298, "acc_stderr": 0.03255525359340355, "acc_norm": 0.5446808510638298, "acc_norm_stderr": 0.03255525359340355 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.4649122807017544, "acc_stderr": 0.046920083813689104, "acc_norm": 0.4649122807017544, "acc_norm_stderr": 0.046920083813689104 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5793103448275863, "acc_stderr": 0.0411391498118926, "acc_norm": 0.5793103448275863, "acc_norm_stderr": 0.0411391498118926 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.3915343915343915, "acc_stderr": 0.02513809138885111, "acc_norm": 0.3915343915343915, "acc_norm_stderr": 0.02513809138885111 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.42063492063492064, "acc_stderr": 0.04415438226743744, "acc_norm": 0.42063492063492064, "acc_norm_stderr": 0.04415438226743744 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.36, "acc_stderr": 0.04824181513244218, "acc_norm": 0.36, "acc_norm_stderr": 0.04824181513244218 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.7870967741935484, "acc_stderr": 0.02328766512726853, "acc_norm": 0.7870967741935484, "acc_norm_stderr": 0.02328766512726853 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.541871921182266, "acc_stderr": 0.03505630140785741, "acc_norm": 0.541871921182266, "acc_norm_stderr": 0.03505630140785741 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.67, "acc_stderr": 0.04725815626252607, "acc_norm": 0.67, "acc_norm_stderr": 0.04725815626252607 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.7818181818181819, "acc_stderr": 0.03225078108306289, "acc_norm": 0.7818181818181819, "acc_norm_stderr": 0.03225078108306289 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.8080808080808081, "acc_stderr": 0.02805779167298902, "acc_norm": 0.8080808080808081, "acc_norm_stderr": 0.02805779167298902 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.8963730569948186, "acc_stderr": 0.02199531196364424, "acc_norm": 0.8963730569948186, "acc_norm_stderr": 0.02199531196364424 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.6538461538461539, "acc_stderr": 0.024121125416941197, "acc_norm": 0.6538461538461539, "acc_norm_stderr": 0.024121125416941197 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.3333333333333333, "acc_stderr": 0.028742040903948485, "acc_norm": 0.3333333333333333, "acc_norm_stderr": 0.028742040903948485 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.7142857142857143, "acc_stderr": 0.029344572500634353, "acc_norm": 0.7142857142857143, "acc_norm_stderr": 0.029344572500634353 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.33112582781456956, "acc_stderr": 0.038425817186598696, "acc_norm": 0.33112582781456956, "acc_norm_stderr": 
0.038425817186598696 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8311926605504587, "acc_stderr": 0.01606005626853033, "acc_norm": 0.8311926605504587, "acc_norm_stderr": 0.01606005626853033 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.5138888888888888, "acc_stderr": 0.03408655867977749, "acc_norm": 0.5138888888888888, "acc_norm_stderr": 0.03408655867977749 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.7794117647058824, "acc_stderr": 0.02910225438967408, "acc_norm": 0.7794117647058824, "acc_norm_stderr": 0.02910225438967408 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.810126582278481, "acc_stderr": 0.025530100460233494, "acc_norm": 0.810126582278481, "acc_norm_stderr": 0.025530100460233494 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.695067264573991, "acc_stderr": 0.030898610882477515, "acc_norm": 0.695067264573991, "acc_norm_stderr": 0.030898610882477515 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.7557251908396947, "acc_stderr": 0.03768335959728744, "acc_norm": 0.7557251908396947, "acc_norm_stderr": 0.03768335959728744 }, "harness|hendrycksTest-international_law|5": { "acc": 0.7768595041322314, "acc_stderr": 0.03800754475228732, "acc_norm": 0.7768595041322314, "acc_norm_stderr": 0.03800754475228732 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.7685185185185185, "acc_stderr": 0.04077494709252627, "acc_norm": 0.7685185185185185, "acc_norm_stderr": 0.04077494709252627 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.754601226993865, "acc_stderr": 0.03380939813943354, "acc_norm": 0.754601226993865, "acc_norm_stderr": 0.03380939813943354 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.4732142857142857, "acc_stderr": 0.047389751192741546, "acc_norm": 0.4732142857142857, "acc_norm_stderr": 0.047389751192741546 }, "harness|hendrycksTest-management|5": { "acc": 0.7961165048543689, "acc_stderr": 0.0398913985953177, "acc_norm": 0.7961165048543689, "acc_norm_stderr": 0.0398913985953177 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8717948717948718, "acc_stderr": 0.021901905115073325, "acc_norm": 0.8717948717948718, "acc_norm_stderr": 0.021901905115073325 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.73, "acc_stderr": 0.044619604333847394, "acc_norm": 0.73, "acc_norm_stderr": 0.044619604333847394 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.8109833971902938, "acc_stderr": 0.014000791294407003, "acc_norm": 0.8109833971902938, "acc_norm_stderr": 0.014000791294407003 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.7225433526011561, "acc_stderr": 0.024105712607754307, "acc_norm": 0.7225433526011561, "acc_norm_stderr": 0.024105712607754307 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.37318435754189944, "acc_stderr": 0.016175692013381954, "acc_norm": 0.37318435754189944, "acc_norm_stderr": 0.016175692013381954 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.738562091503268, "acc_stderr": 0.025160998214292456, "acc_norm": 0.738562091503268, "acc_norm_stderr": 0.025160998214292456 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.7266881028938906, "acc_stderr": 0.02531176597542612, "acc_norm": 0.7266881028938906, "acc_norm_stderr": 0.02531176597542612 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.7253086419753086, "acc_stderr": 0.02483605786829467, "acc_norm": 0.7253086419753086, "acc_norm_stderr": 0.02483605786829467 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.48936170212765956, "acc_stderr": 
0.029820747191422473, "acc_norm": 0.48936170212765956, "acc_norm_stderr": 0.029820747191422473 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.4758800521512386, "acc_stderr": 0.01275536872286394, "acc_norm": 0.4758800521512386, "acc_norm_stderr": 0.01275536872286394 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.6764705882352942, "acc_stderr": 0.028418208619406755, "acc_norm": 0.6764705882352942, "acc_norm_stderr": 0.028418208619406755 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.6781045751633987, "acc_stderr": 0.018901015322093092, "acc_norm": 0.6781045751633987, "acc_norm_stderr": 0.018901015322093092 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6818181818181818, "acc_stderr": 0.044612721759105085, "acc_norm": 0.6818181818181818, "acc_norm_stderr": 0.044612721759105085 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.7061224489795919, "acc_stderr": 0.02916273841024977, "acc_norm": 0.7061224489795919, "acc_norm_stderr": 0.02916273841024977 }, "harness|hendrycksTest-sociology|5": { "acc": 0.835820895522388, "acc_stderr": 0.026193923544454125, "acc_norm": 0.835820895522388, "acc_norm_stderr": 0.026193923544454125 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.86, "acc_stderr": 0.03487350880197771, "acc_norm": 0.86, "acc_norm_stderr": 0.03487350880197771 }, "harness|hendrycksTest-virology|5": { "acc": 0.5542168674698795, "acc_stderr": 0.03869543323472101, "acc_norm": 0.5542168674698795, "acc_norm_stderr": 0.03869543323472101 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8245614035087719, "acc_stderr": 0.029170885500727682, "acc_norm": 0.8245614035087719, "acc_norm_stderr": 0.029170885500727682 }, "harness|truthfulqa:mc|0": { "mc1": 0.36474908200734396, "mc1_stderr": 0.01685096106172012, "mc2": 0.5350238552317648, "mc2_stderr": 0.015383683041808177 }, "harness|winogrande|5": { "acc": 0.8018942383583267, "acc_stderr": 0.011201862744487048 }, "harness|gsm8k|5": { "acc": 0.6072782410917361, "acc_stderr": 0.013451745349586569 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). 
--> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
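As an aside on the "results" configuration described near the top of this card: the short sketch below is not part of the original card. It only reuses the repository id plus the "results" config and "latest" split names that appear in this card's metadata, and shows one way to pull the aggregated metrics for the most recent run:

```python
from datasets import load_dataset

# Minimal sketch (assumes the "results" config and "latest" split listed in the
# card metadata): load the aggregated metrics of the most recent evaluation run.
aggregated = load_dataset(
    "open-llm-leaderboard/details_perlthoughts__Falkor-8x7B-MoE",
    "results",
    split="latest",
)

# Inspect the aggregated scores; the results parquet typically holds one row per run.
print(aggregated[0])
```

The per-task details can be read the same way: replace "results" with one of the 63 task configurations (for example "harness_gsm8k_5") and pick either the timestamped split or "latest".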
open-llm-leaderboard/details_perlthoughts__Falkor-8x7B-MoE
[ "region:us" ]
2023-12-16T21:51:53+00:00
{"pretty_name": "Evaluation run of perlthoughts/Falkor-8x7B-MoE", "dataset_summary": "Dataset automatically created during the evaluation run of model [perlthoughts/Falkor-8x7B-MoE](https://huggingface.co/perlthoughts/Falkor-8x7B-MoE) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_perlthoughts__Falkor-8x7B-MoE\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-12-16T21:48:58.361135](https://huggingface.co/datasets/open-llm-leaderboard/details_perlthoughts__Falkor-8x7B-MoE/blob/main/results_2023-12-16T21-48-58.361135.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6436792323952398,\n \"acc_stderr\": 0.0322025695700015,\n \"acc_norm\": 0.6451930910788192,\n \"acc_norm_stderr\": 0.03285268737187084,\n \"mc1\": 0.36474908200734396,\n \"mc1_stderr\": 0.01685096106172012,\n \"mc2\": 0.5350238552317648,\n \"mc2_stderr\": 0.015383683041808177\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.6356655290102389,\n \"acc_stderr\": 0.014063260279882419,\n \"acc_norm\": 0.6629692832764505,\n \"acc_norm_stderr\": 0.013813476652902276\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6665006970722963,\n \"acc_stderr\": 0.004704996294145036,\n \"acc_norm\": 0.8503286197968533,\n \"acc_norm_stderr\": 0.0035601991854865575\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.36,\n \"acc_stderr\": 0.04824181513244218,\n \"acc_norm\": 0.36,\n \"acc_norm_stderr\": 0.04824181513244218\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6370370370370371,\n \"acc_stderr\": 0.04153948404742398,\n \"acc_norm\": 0.6370370370370371,\n \"acc_norm_stderr\": 0.04153948404742398\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.6973684210526315,\n \"acc_stderr\": 0.037385206761196686,\n \"acc_norm\": 0.6973684210526315,\n \"acc_norm_stderr\": 0.037385206761196686\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.56,\n \"acc_stderr\": 0.04988876515698589,\n \"acc_norm\": 0.56,\n \"acc_norm_stderr\": 0.04988876515698589\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.7358490566037735,\n \"acc_stderr\": 0.027134291628741702,\n \"acc_norm\": 0.7358490566037735,\n \"acc_norm_stderr\": 0.027134291628741702\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7847222222222222,\n \"acc_stderr\": 0.03437079344106135,\n \"acc_norm\": 0.7847222222222222,\n \"acc_norm_stderr\": 0.03437079344106135\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.45,\n \"acc_stderr\": 0.05,\n \"acc_norm\": 0.45,\n 
\"acc_norm_stderr\": 0.05\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.52,\n \"acc_stderr\": 0.050211673156867795,\n \"acc_norm\": 0.52,\n \"acc_norm_stderr\": 0.050211673156867795\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.3,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.3,\n \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6705202312138728,\n \"acc_stderr\": 0.03583901754736412,\n \"acc_norm\": 0.6705202312138728,\n \"acc_norm_stderr\": 0.03583901754736412\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.37254901960784315,\n \"acc_stderr\": 0.048108401480826346,\n \"acc_norm\": 0.37254901960784315,\n \"acc_norm_stderr\": 0.048108401480826346\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.75,\n \"acc_stderr\": 0.04351941398892446,\n \"acc_norm\": 0.75,\n \"acc_norm_stderr\": 0.04351941398892446\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.5446808510638298,\n \"acc_stderr\": 0.03255525359340355,\n \"acc_norm\": 0.5446808510638298,\n \"acc_norm_stderr\": 0.03255525359340355\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.4649122807017544,\n \"acc_stderr\": 0.046920083813689104,\n \"acc_norm\": 0.4649122807017544,\n \"acc_norm_stderr\": 0.046920083813689104\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5793103448275863,\n \"acc_stderr\": 0.0411391498118926,\n \"acc_norm\": 0.5793103448275863,\n \"acc_norm_stderr\": 0.0411391498118926\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.3915343915343915,\n \"acc_stderr\": 0.02513809138885111,\n \"acc_norm\": 0.3915343915343915,\n \"acc_norm_stderr\": 0.02513809138885111\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.42063492063492064,\n \"acc_stderr\": 0.04415438226743744,\n \"acc_norm\": 0.42063492063492064,\n \"acc_norm_stderr\": 0.04415438226743744\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.36,\n \"acc_stderr\": 0.04824181513244218,\n \"acc_norm\": 0.36,\n \"acc_norm_stderr\": 0.04824181513244218\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7870967741935484,\n \"acc_stderr\": 0.02328766512726853,\n \"acc_norm\": 0.7870967741935484,\n \"acc_norm_stderr\": 0.02328766512726853\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.541871921182266,\n \"acc_stderr\": 0.03505630140785741,\n \"acc_norm\": 0.541871921182266,\n \"acc_norm_stderr\": 0.03505630140785741\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.67,\n \"acc_stderr\": 0.04725815626252607,\n \"acc_norm\": 0.67,\n \"acc_norm_stderr\": 0.04725815626252607\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7818181818181819,\n \"acc_stderr\": 0.03225078108306289,\n \"acc_norm\": 0.7818181818181819,\n \"acc_norm_stderr\": 0.03225078108306289\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.8080808080808081,\n \"acc_stderr\": 0.02805779167298902,\n \"acc_norm\": 0.8080808080808081,\n \"acc_norm_stderr\": 0.02805779167298902\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.8963730569948186,\n \"acc_stderr\": 0.02199531196364424,\n \"acc_norm\": 0.8963730569948186,\n \"acc_norm_stderr\": 0.02199531196364424\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.6538461538461539,\n \"acc_stderr\": 
0.024121125416941197,\n \"acc_norm\": 0.6538461538461539,\n \"acc_norm_stderr\": 0.024121125416941197\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.3333333333333333,\n \"acc_stderr\": 0.028742040903948485,\n \"acc_norm\": 0.3333333333333333,\n \"acc_norm_stderr\": 0.028742040903948485\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.7142857142857143,\n \"acc_stderr\": 0.029344572500634353,\n \"acc_norm\": 0.7142857142857143,\n \"acc_norm_stderr\": 0.029344572500634353\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.33112582781456956,\n \"acc_stderr\": 0.038425817186598696,\n \"acc_norm\": 0.33112582781456956,\n \"acc_norm_stderr\": 0.038425817186598696\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8311926605504587,\n \"acc_stderr\": 0.01606005626853033,\n \"acc_norm\": 0.8311926605504587,\n \"acc_norm_stderr\": 0.01606005626853033\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.5138888888888888,\n \"acc_stderr\": 0.03408655867977749,\n \"acc_norm\": 0.5138888888888888,\n \"acc_norm_stderr\": 0.03408655867977749\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.7794117647058824,\n \"acc_stderr\": 0.02910225438967408,\n \"acc_norm\": 0.7794117647058824,\n \"acc_norm_stderr\": 0.02910225438967408\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.810126582278481,\n \"acc_stderr\": 0.025530100460233494,\n \"acc_norm\": 0.810126582278481,\n \"acc_norm_stderr\": 0.025530100460233494\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.695067264573991,\n \"acc_stderr\": 0.030898610882477515,\n \"acc_norm\": 0.695067264573991,\n \"acc_norm_stderr\": 0.030898610882477515\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.7557251908396947,\n \"acc_stderr\": 0.03768335959728744,\n \"acc_norm\": 0.7557251908396947,\n \"acc_norm_stderr\": 0.03768335959728744\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.7768595041322314,\n \"acc_stderr\": 0.03800754475228732,\n \"acc_norm\": 0.7768595041322314,\n \"acc_norm_stderr\": 0.03800754475228732\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7685185185185185,\n \"acc_stderr\": 0.04077494709252627,\n \"acc_norm\": 0.7685185185185185,\n \"acc_norm_stderr\": 0.04077494709252627\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.754601226993865,\n \"acc_stderr\": 0.03380939813943354,\n \"acc_norm\": 0.754601226993865,\n \"acc_norm_stderr\": 0.03380939813943354\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.4732142857142857,\n \"acc_stderr\": 0.047389751192741546,\n \"acc_norm\": 0.4732142857142857,\n \"acc_norm_stderr\": 0.047389751192741546\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7961165048543689,\n \"acc_stderr\": 0.0398913985953177,\n \"acc_norm\": 0.7961165048543689,\n \"acc_norm_stderr\": 0.0398913985953177\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8717948717948718,\n \"acc_stderr\": 0.021901905115073325,\n \"acc_norm\": 0.8717948717948718,\n \"acc_norm_stderr\": 0.021901905115073325\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.73,\n \"acc_stderr\": 0.044619604333847394,\n \"acc_norm\": 0.73,\n \"acc_norm_stderr\": 0.044619604333847394\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.8109833971902938,\n \"acc_stderr\": 0.014000791294407003,\n \"acc_norm\": 0.8109833971902938,\n 
\"acc_norm_stderr\": 0.014000791294407003\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.7225433526011561,\n \"acc_stderr\": 0.024105712607754307,\n \"acc_norm\": 0.7225433526011561,\n \"acc_norm_stderr\": 0.024105712607754307\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.37318435754189944,\n \"acc_stderr\": 0.016175692013381954,\n \"acc_norm\": 0.37318435754189944,\n \"acc_norm_stderr\": 0.016175692013381954\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.738562091503268,\n \"acc_stderr\": 0.025160998214292456,\n \"acc_norm\": 0.738562091503268,\n \"acc_norm_stderr\": 0.025160998214292456\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.7266881028938906,\n \"acc_stderr\": 0.02531176597542612,\n \"acc_norm\": 0.7266881028938906,\n \"acc_norm_stderr\": 0.02531176597542612\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.7253086419753086,\n \"acc_stderr\": 0.02483605786829467,\n \"acc_norm\": 0.7253086419753086,\n \"acc_norm_stderr\": 0.02483605786829467\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.48936170212765956,\n \"acc_stderr\": 0.029820747191422473,\n \"acc_norm\": 0.48936170212765956,\n \"acc_norm_stderr\": 0.029820747191422473\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.4758800521512386,\n \"acc_stderr\": 0.01275536872286394,\n \"acc_norm\": 0.4758800521512386,\n \"acc_norm_stderr\": 0.01275536872286394\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.6764705882352942,\n \"acc_stderr\": 0.028418208619406755,\n \"acc_norm\": 0.6764705882352942,\n \"acc_norm_stderr\": 0.028418208619406755\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6781045751633987,\n \"acc_stderr\": 0.018901015322093092,\n \"acc_norm\": 0.6781045751633987,\n \"acc_norm_stderr\": 0.018901015322093092\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6818181818181818,\n \"acc_stderr\": 0.044612721759105085,\n \"acc_norm\": 0.6818181818181818,\n \"acc_norm_stderr\": 0.044612721759105085\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.7061224489795919,\n \"acc_stderr\": 0.02916273841024977,\n \"acc_norm\": 0.7061224489795919,\n \"acc_norm_stderr\": 0.02916273841024977\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.835820895522388,\n \"acc_stderr\": 0.026193923544454125,\n \"acc_norm\": 0.835820895522388,\n \"acc_norm_stderr\": 0.026193923544454125\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.86,\n \"acc_stderr\": 0.03487350880197771,\n \"acc_norm\": 0.86,\n \"acc_norm_stderr\": 0.03487350880197771\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5542168674698795,\n \"acc_stderr\": 0.03869543323472101,\n \"acc_norm\": 0.5542168674698795,\n \"acc_norm_stderr\": 0.03869543323472101\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8245614035087719,\n \"acc_stderr\": 0.029170885500727682,\n \"acc_norm\": 0.8245614035087719,\n \"acc_norm_stderr\": 0.029170885500727682\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.36474908200734396,\n \"mc1_stderr\": 0.01685096106172012,\n \"mc2\": 0.5350238552317648,\n \"mc2_stderr\": 0.015383683041808177\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.8018942383583267,\n \"acc_stderr\": 0.011201862744487048\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.6072782410917361,\n \"acc_stderr\": 0.013451745349586569\n }\n}\n```", "repo_url": "https://huggingface.co/perlthoughts/Falkor-8x7B-MoE", 
"leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_12_16T21_48_58.361135", "path": ["**/details_harness|arc:challenge|25_2023-12-16T21-48-58.361135.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-12-16T21-48-58.361135.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_12_16T21_48_58.361135", "path": ["**/details_harness|gsm8k|5_2023-12-16T21-48-58.361135.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-12-16T21-48-58.361135.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_12_16T21_48_58.361135", "path": ["**/details_harness|hellaswag|10_2023-12-16T21-48-58.361135.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-12-16T21-48-58.361135.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_12_16T21_48_58.361135", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T21-48-58.361135.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-12-16T21-48-58.361135.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-12-16T21-48-58.361135.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T21-48-58.361135.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T21-48-58.361135.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-12-16T21-48-58.361135.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T21-48-58.361135.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T21-48-58.361135.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T21-48-58.361135.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T21-48-58.361135.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-12-16T21-48-58.361135.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-12-16T21-48-58.361135.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T21-48-58.361135.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-12-16T21-48-58.361135.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T21-48-58.361135.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T21-48-58.361135.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T21-48-58.361135.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-12-16T21-48-58.361135.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T21-48-58.361135.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T21-48-58.361135.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T21-48-58.361135.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T21-48-58.361135.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T21-48-58.361135.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T21-48-58.361135.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T21-48-58.361135.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T21-48-58.361135.parquet", 
"**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T21-48-58.361135.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T21-48-58.361135.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T21-48-58.361135.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T21-48-58.361135.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T21-48-58.361135.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T21-48-58.361135.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-12-16T21-48-58.361135.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T21-48-58.361135.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-12-16T21-48-58.361135.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T21-48-58.361135.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T21-48-58.361135.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T21-48-58.361135.parquet", "**/details_harness|hendrycksTest-management|5_2023-12-16T21-48-58.361135.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-12-16T21-48-58.361135.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T21-48-58.361135.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T21-48-58.361135.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T21-48-58.361135.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T21-48-58.361135.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-12-16T21-48-58.361135.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-12-16T21-48-58.361135.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-12-16T21-48-58.361135.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T21-48-58.361135.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-12-16T21-48-58.361135.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T21-48-58.361135.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T21-48-58.361135.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-12-16T21-48-58.361135.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-12-16T21-48-58.361135.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-12-16T21-48-58.361135.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T21-48-58.361135.parquet", "**/details_harness|hendrycksTest-virology|5_2023-12-16T21-48-58.361135.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-12-16T21-48-58.361135.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T21-48-58.361135.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-12-16T21-48-58.361135.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-12-16T21-48-58.361135.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T21-48-58.361135.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T21-48-58.361135.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-12-16T21-48-58.361135.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T21-48-58.361135.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T21-48-58.361135.parquet", 
"**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T21-48-58.361135.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T21-48-58.361135.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-12-16T21-48-58.361135.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-12-16T21-48-58.361135.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T21-48-58.361135.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-12-16T21-48-58.361135.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T21-48-58.361135.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T21-48-58.361135.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T21-48-58.361135.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-12-16T21-48-58.361135.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T21-48-58.361135.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T21-48-58.361135.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T21-48-58.361135.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T21-48-58.361135.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T21-48-58.361135.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T21-48-58.361135.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T21-48-58.361135.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T21-48-58.361135.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T21-48-58.361135.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T21-48-58.361135.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T21-48-58.361135.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T21-48-58.361135.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T21-48-58.361135.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T21-48-58.361135.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-12-16T21-48-58.361135.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T21-48-58.361135.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-12-16T21-48-58.361135.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T21-48-58.361135.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T21-48-58.361135.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T21-48-58.361135.parquet", "**/details_harness|hendrycksTest-management|5_2023-12-16T21-48-58.361135.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-12-16T21-48-58.361135.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T21-48-58.361135.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T21-48-58.361135.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T21-48-58.361135.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T21-48-58.361135.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-12-16T21-48-58.361135.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-12-16T21-48-58.361135.parquet", 
"**/details_harness|hendrycksTest-prehistory|5_2023-12-16T21-48-58.361135.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T21-48-58.361135.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-12-16T21-48-58.361135.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T21-48-58.361135.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T21-48-58.361135.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-12-16T21-48-58.361135.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-12-16T21-48-58.361135.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-12-16T21-48-58.361135.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T21-48-58.361135.parquet", "**/details_harness|hendrycksTest-virology|5_2023-12-16T21-48-58.361135.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-12-16T21-48-58.361135.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_12_16T21_48_58.361135", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T21-48-58.361135.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T21-48-58.361135.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_12_16T21_48_58.361135", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-12-16T21-48-58.361135.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-12-16T21-48-58.361135.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_12_16T21_48_58.361135", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-12-16T21-48-58.361135.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-12-16T21-48-58.361135.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_12_16T21_48_58.361135", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T21-48-58.361135.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T21-48-58.361135.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_12_16T21_48_58.361135", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T21-48-58.361135.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T21-48-58.361135.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_12_16T21_48_58.361135", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-12-16T21-48-58.361135.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-12-16T21-48-58.361135.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_12_16T21_48_58.361135", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T21-48-58.361135.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T21-48-58.361135.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_12_16T21_48_58.361135", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T21-48-58.361135.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T21-48-58.361135.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_12_16T21_48_58.361135", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T21-48-58.361135.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T21-48-58.361135.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_12_16T21_48_58.361135", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T21-48-58.361135.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T21-48-58.361135.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_12_16T21_48_58.361135", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-12-16T21-48-58.361135.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-12-16T21-48-58.361135.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_12_16T21_48_58.361135", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-12-16T21-48-58.361135.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-12-16T21-48-58.361135.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_12_16T21_48_58.361135", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T21-48-58.361135.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T21-48-58.361135.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_12_16T21_48_58.361135", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-12-16T21-48-58.361135.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-12-16T21-48-58.361135.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_12_16T21_48_58.361135", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T21-48-58.361135.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T21-48-58.361135.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_12_16T21_48_58.361135", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T21-48-58.361135.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T21-48-58.361135.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_12_16T21_48_58.361135", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T21-48-58.361135.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T21-48-58.361135.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_12_16T21_48_58.361135", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-12-16T21-48-58.361135.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-12-16T21-48-58.361135.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_12_16T21_48_58.361135", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T21-48-58.361135.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T21-48-58.361135.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_12_16T21_48_58.361135", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T21-48-58.361135.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T21-48-58.361135.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_12_16T21_48_58.361135", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T21-48-58.361135.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T21-48-58.361135.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_12_16T21_48_58.361135", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T21-48-58.361135.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T21-48-58.361135.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_12_16T21_48_58.361135", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T21-48-58.361135.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T21-48-58.361135.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_12_16T21_48_58.361135", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T21-48-58.361135.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T21-48-58.361135.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_12_16T21_48_58.361135", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T21-48-58.361135.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T21-48-58.361135.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_12_16T21_48_58.361135", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T21-48-58.361135.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T21-48-58.361135.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_12_16T21_48_58.361135", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T21-48-58.361135.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T21-48-58.361135.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_12_16T21_48_58.361135", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T21-48-58.361135.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T21-48-58.361135.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_12_16T21_48_58.361135", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T21-48-58.361135.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T21-48-58.361135.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_12_16T21_48_58.361135", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T21-48-58.361135.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T21-48-58.361135.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_12_16T21_48_58.361135", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T21-48-58.361135.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T21-48-58.361135.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_12_16T21_48_58.361135", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T21-48-58.361135.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T21-48-58.361135.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_12_16T21_48_58.361135", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-12-16T21-48-58.361135.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-12-16T21-48-58.361135.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_12_16T21_48_58.361135", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T21-48-58.361135.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T21-48-58.361135.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_12_16T21_48_58.361135", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-12-16T21-48-58.361135.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-12-16T21-48-58.361135.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_12_16T21_48_58.361135", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T21-48-58.361135.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T21-48-58.361135.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_12_16T21_48_58.361135", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T21-48-58.361135.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T21-48-58.361135.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_12_16T21_48_58.361135", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T21-48-58.361135.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T21-48-58.361135.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_12_16T21_48_58.361135", "path": ["**/details_harness|hendrycksTest-management|5_2023-12-16T21-48-58.361135.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-12-16T21-48-58.361135.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_12_16T21_48_58.361135", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-12-16T21-48-58.361135.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-12-16T21-48-58.361135.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_12_16T21_48_58.361135", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T21-48-58.361135.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T21-48-58.361135.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_12_16T21_48_58.361135", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T21-48-58.361135.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T21-48-58.361135.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_12_16T21_48_58.361135", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T21-48-58.361135.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T21-48-58.361135.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_12_16T21_48_58.361135", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T21-48-58.361135.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T21-48-58.361135.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_12_16T21_48_58.361135", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-12-16T21-48-58.361135.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-12-16T21-48-58.361135.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_12_16T21_48_58.361135", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-12-16T21-48-58.361135.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-12-16T21-48-58.361135.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_12_16T21_48_58.361135", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-12-16T21-48-58.361135.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-12-16T21-48-58.361135.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_12_16T21_48_58.361135", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T21-48-58.361135.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T21-48-58.361135.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_12_16T21_48_58.361135", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-12-16T21-48-58.361135.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2023-12-16T21-48-58.361135.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_12_16T21_48_58.361135", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T21-48-58.361135.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T21-48-58.361135.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_12_16T21_48_58.361135", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T21-48-58.361135.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T21-48-58.361135.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_12_16T21_48_58.361135", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-12-16T21-48-58.361135.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-12-16T21-48-58.361135.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_12_16T21_48_58.361135", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-12-16T21-48-58.361135.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-12-16T21-48-58.361135.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_12_16T21_48_58.361135", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-12-16T21-48-58.361135.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-12-16T21-48-58.361135.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_12_16T21_48_58.361135", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T21-48-58.361135.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T21-48-58.361135.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_12_16T21_48_58.361135", "path": ["**/details_harness|hendrycksTest-virology|5_2023-12-16T21-48-58.361135.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-12-16T21-48-58.361135.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_12_16T21_48_58.361135", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-12-16T21-48-58.361135.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-12-16T21-48-58.361135.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_12_16T21_48_58.361135", "path": ["**/details_harness|truthfulqa:mc|0_2023-12-16T21-48-58.361135.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-12-16T21-48-58.361135.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_12_16T21_48_58.361135", "path": ["**/details_harness|winogrande|5_2023-12-16T21-48-58.361135.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-12-16T21-48-58.361135.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_12_16T21_48_58.361135", "path": ["results_2023-12-16T21-48-58.361135.parquet"]}, {"split": "latest", "path": 
["results_2023-12-16T21-48-58.361135.parquet"]}]}]}
2023-12-16T21:52:38+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of perlthoughts/Falkor-8x7B-MoE Dataset automatically created during the evaluation run of model perlthoughts/Falkor-8x7B-MoE on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following (see the sketch just after this card): ## Latest results These are the latest results from run 2023-12-16T21:48:58.361135 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
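The load snippet that originally followed "do the following:" was dropped when this card was flattened. Below is a minimal sketch of it, assuming the details repository follows the leaderboard's usual `details_<org>__<model>` naming pattern; the exact repo id is inferred from that pattern, not quoted from this record.

```python
# Minimal sketch: load one task's details for perlthoughts/Falkor-8x7B-MoE.
# The repo id is an assumption based on the "details_<org>__<model>" naming
# convention used by the other cards in this dump.
from datasets import load_dataset

data = load_dataset(
    "open-llm-leaderboard/details_perlthoughts__Falkor-8x7B-MoE",
    "harness_winogrande_5",
    split="train",
)
```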
[ "# Dataset Card for Evaluation run of perlthoughts/Falkor-8x7B-MoE\n\n\n\nDataset automatically created during the evaluation run of model perlthoughts/Falkor-8x7B-MoE on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-12-16T21:48:58.361135(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of perlthoughts/Falkor-8x7B-MoE\n\n\n\nDataset automatically created during the evaluation run of model perlthoughts/Falkor-8x7B-MoE on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-12-16T21:48:58.361135(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 191, 66, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of perlthoughts/Falkor-8x7B-MoE\n\n\n\nDataset automatically created during the evaluation run of model perlthoughts/Falkor-8x7B-MoE on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-12-16T21:48:58.361135(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]## Dataset Card Authors [optional]" ]
5fac72a50b29617fa3c301d7039a48ecfe573473
# Dataset Card for Evaluation run of adamo1139/Yi-34B-200K-AEZAKMI-v2 <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [adamo1139/Yi-34B-200K-AEZAKMI-v2](https://huggingface.co/adamo1139/Yi-34B-200K-AEZAKMI-v2) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_adamo1139__Yi-34B-200K-AEZAKMI-v2", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-12-16T22:00:38.648825](https://huggingface.co/datasets/open-llm-leaderboard/details_adamo1139__Yi-34B-200K-AEZAKMI-v2/blob/main/results_2023-12-16T22-00-38.648825.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.7472785535366926, "acc_stderr": 0.028772452169713955, "acc_norm": 0.7527023693141784, "acc_norm_stderr": 0.029306663104184946, "mc1": 0.40024479804161567, "mc1_stderr": 0.017151605555749138, "mc2": 0.5674054536094885, "mc2_stderr": 0.015461253424328927 }, "harness|arc:challenge|25": { "acc": 0.6416382252559727, "acc_stderr": 0.014012883334859854, "acc_norm": 0.6791808873720137, "acc_norm_stderr": 0.01364094309194653 }, "harness|hellaswag|10": { "acc": 0.6630153355905198, "acc_stderr": 0.00471713572219417, "acc_norm": 0.8561043616809401, "acc_norm_stderr": 0.003502665674197166 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.39, "acc_stderr": 0.04902071300001975, "acc_norm": 0.39, "acc_norm_stderr": 0.04902071300001975 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.6888888888888889, "acc_stderr": 0.039992628766177214, "acc_norm": 0.6888888888888889, "acc_norm_stderr": 0.039992628766177214 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.8421052631578947, "acc_stderr": 0.029674167520101456, "acc_norm": 0.8421052631578947, "acc_norm_stderr": 0.029674167520101456 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.78, "acc_stderr": 0.04163331998932262, "acc_norm": 0.78, "acc_norm_stderr": 0.04163331998932262 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.8113207547169812, "acc_stderr": 0.024079995130062253, "acc_norm": 0.8113207547169812, "acc_norm_stderr": 0.024079995130062253 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.8611111111111112, "acc_stderr": 0.0289198029561349, "acc_norm": 0.8611111111111112, "acc_norm_stderr": 0.0289198029561349 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.52, "acc_stderr": 0.050211673156867795, "acc_norm": 0.52, "acc_norm_stderr": 0.050211673156867795 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.61, "acc_stderr": 0.04902071300001975, "acc_norm": 0.61, "acc_norm_stderr": 0.04902071300001975 },
"harness|hendrycksTest-college_mathematics|5": { "acc": 0.44, "acc_stderr": 0.0498887651569859, "acc_norm": 0.44, "acc_norm_stderr": 0.0498887651569859 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.7341040462427746, "acc_stderr": 0.03368762932259432, "acc_norm": 0.7341040462427746, "acc_norm_stderr": 0.03368762932259432 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.5294117647058824, "acc_stderr": 0.049665709039785295, "acc_norm": 0.5294117647058824, "acc_norm_stderr": 0.049665709039785295 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.81, "acc_stderr": 0.039427724440366234, "acc_norm": 0.81, "acc_norm_stderr": 0.039427724440366234 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.7702127659574468, "acc_stderr": 0.02750175294441242, "acc_norm": 0.7702127659574468, "acc_norm_stderr": 0.02750175294441242 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.6228070175438597, "acc_stderr": 0.04559522141958216, "acc_norm": 0.6228070175438597, "acc_norm_stderr": 0.04559522141958216 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.7241379310344828, "acc_stderr": 0.037245636197746325, "acc_norm": 0.7241379310344828, "acc_norm_stderr": 0.037245636197746325 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.6798941798941799, "acc_stderr": 0.024026846392873502, "acc_norm": 0.6798941798941799, "acc_norm_stderr": 0.024026846392873502 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.5, "acc_stderr": 0.04472135954999579, "acc_norm": 0.5, "acc_norm_stderr": 0.04472135954999579 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.52, "acc_stderr": 0.050211673156867795, "acc_norm": 0.52, "acc_norm_stderr": 0.050211673156867795 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.9, "acc_stderr": 0.017066403719657255, "acc_norm": 0.9, "acc_norm_stderr": 0.017066403719657255 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.6699507389162561, "acc_stderr": 0.033085304262282574, "acc_norm": 0.6699507389162561, "acc_norm_stderr": 0.033085304262282574 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.76, "acc_stderr": 0.04292346959909282, "acc_norm": 0.76, "acc_norm_stderr": 0.04292346959909282 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.8424242424242424, "acc_stderr": 0.028450388805284336, "acc_norm": 0.8424242424242424, "acc_norm_stderr": 0.028450388805284336 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.9191919191919192, "acc_stderr": 0.019417681889724536, "acc_norm": 0.9191919191919192, "acc_norm_stderr": 0.019417681889724536 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.9637305699481865, "acc_stderr": 0.013492659751295153, "acc_norm": 0.9637305699481865, "acc_norm_stderr": 0.013492659751295153 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.782051282051282, "acc_stderr": 0.02093244577446319, "acc_norm": 0.782051282051282, "acc_norm_stderr": 0.02093244577446319 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.3888888888888889, "acc_stderr": 0.029723278961476664, "acc_norm": 0.3888888888888889, "acc_norm_stderr": 0.029723278961476664 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.8235294117647058, "acc_stderr": 0.02476290267805791, "acc_norm": 0.8235294117647058, "acc_norm_stderr": 0.02476290267805791 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.4966887417218543, "acc_stderr": 0.04082393379449654, "acc_norm": 0.4966887417218543, "acc_norm_stderr": 
0.04082393379449654 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.9174311926605505, "acc_stderr": 0.01180036136301657, "acc_norm": 0.9174311926605505, "acc_norm_stderr": 0.01180036136301657 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.6388888888888888, "acc_stderr": 0.032757734861009996, "acc_norm": 0.6388888888888888, "acc_norm_stderr": 0.032757734861009996 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.9166666666666666, "acc_stderr": 0.019398452135813905, "acc_norm": 0.9166666666666666, "acc_norm_stderr": 0.019398452135813905 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.9071729957805907, "acc_stderr": 0.018889750550956715, "acc_norm": 0.9071729957805907, "acc_norm_stderr": 0.018889750550956715 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.7847533632286996, "acc_stderr": 0.027584066602208274, "acc_norm": 0.7847533632286996, "acc_norm_stderr": 0.027584066602208274 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.8702290076335878, "acc_stderr": 0.029473649496907065, "acc_norm": 0.8702290076335878, "acc_norm_stderr": 0.029473649496907065 }, "harness|hendrycksTest-international_law|5": { "acc": 0.8677685950413223, "acc_stderr": 0.0309227883204458, "acc_norm": 0.8677685950413223, "acc_norm_stderr": 0.0309227883204458 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.8703703703703703, "acc_stderr": 0.03247224389917948, "acc_norm": 0.8703703703703703, "acc_norm_stderr": 0.03247224389917948 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.8895705521472392, "acc_stderr": 0.024624937788941318, "acc_norm": 0.8895705521472392, "acc_norm_stderr": 0.024624937788941318 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.5357142857142857, "acc_stderr": 0.04733667890053756, "acc_norm": 0.5357142857142857, "acc_norm_stderr": 0.04733667890053756 }, "harness|hendrycksTest-management|5": { "acc": 0.8932038834951457, "acc_stderr": 0.030581088928331362, "acc_norm": 0.8932038834951457, "acc_norm_stderr": 0.030581088928331362 }, "harness|hendrycksTest-marketing|5": { "acc": 0.9358974358974359, "acc_stderr": 0.016046261631673137, "acc_norm": 0.9358974358974359, "acc_norm_stderr": 0.016046261631673137 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.87, "acc_stderr": 0.03379976689896309, "acc_norm": 0.87, "acc_norm_stderr": 0.03379976689896309 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.913154533844189, "acc_stderr": 0.010070298377747786, "acc_norm": 0.913154533844189, "acc_norm_stderr": 0.010070298377747786 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.8265895953757225, "acc_stderr": 0.020383229551135033, "acc_norm": 0.8265895953757225, "acc_norm_stderr": 0.020383229551135033 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.7273743016759776, "acc_stderr": 0.014893391735249603, "acc_norm": 0.7273743016759776, "acc_norm_stderr": 0.014893391735249603 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.8496732026143791, "acc_stderr": 0.020464175124332618, "acc_norm": 0.8496732026143791, "acc_norm_stderr": 0.020464175124332618 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.797427652733119, "acc_stderr": 0.02282731749105969, "acc_norm": 0.797427652733119, "acc_norm_stderr": 0.02282731749105969 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.8487654320987654, "acc_stderr": 0.019935086092149886, "acc_norm": 0.8487654320987654, "acc_norm_stderr": 0.019935086092149886 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.6205673758865248, "acc_stderr": 
0.028947338851614098, "acc_norm": 0.6205673758865248, "acc_norm_stderr": 0.028947338851614098 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.5853976531942634, "acc_stderr": 0.012582597058908284, "acc_norm": 0.5853976531942634, "acc_norm_stderr": 0.012582597058908284 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.8235294117647058, "acc_stderr": 0.023157468308559328, "acc_norm": 0.8235294117647058, "acc_norm_stderr": 0.023157468308559328 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.803921568627451, "acc_stderr": 0.016062056421968635, "acc_norm": 0.803921568627451, "acc_norm_stderr": 0.016062056421968635 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.7363636363636363, "acc_stderr": 0.04220224692971987, "acc_norm": 0.7363636363636363, "acc_norm_stderr": 0.04220224692971987 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.8285714285714286, "acc_stderr": 0.02412746346265016, "acc_norm": 0.8285714285714286, "acc_norm_stderr": 0.02412746346265016 }, "harness|hendrycksTest-sociology|5": { "acc": 0.8855721393034826, "acc_stderr": 0.022509345325101716, "acc_norm": 0.8855721393034826, "acc_norm_stderr": 0.022509345325101716 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.9, "acc_stderr": 0.030151134457776334, "acc_norm": 0.9, "acc_norm_stderr": 0.030151134457776334 }, "harness|hendrycksTest-virology|5": { "acc": 0.5602409638554217, "acc_stderr": 0.03864139923699122, "acc_norm": 0.5602409638554217, "acc_norm_stderr": 0.03864139923699122 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8888888888888888, "acc_stderr": 0.02410338420207286, "acc_norm": 0.8888888888888888, "acc_norm_stderr": 0.02410338420207286 }, "harness|truthfulqa:mc|0": { "mc1": 0.40024479804161567, "mc1_stderr": 0.017151605555749138, "mc2": 0.5674054536094885, "mc2_stderr": 0.015461253424328927 }, "harness|winogrande|5": { "acc": 0.8161010260457774, "acc_stderr": 0.01088791601330589 }, "harness|gsm8k|5": { "acc": 0.5890826383623957, "acc_stderr": 0.013552132901423215 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). 
--> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
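Beyond loading a single task's details, the aggregated scores quoted above live in the "results" configuration that the card mentions. A small sketch, assuming the split layout listed for these runs (a timestamped split plus a "latest" split that always points at the most recent evaluation):

```python
# Sketch: pull the aggregated metrics for the most recent run of this model.
# The "results" config is named in the card text; the "latest" split name
# follows the split layout listed in the dataset metadata for these runs.
from datasets import load_dataset

results = load_dataset(
    "open-llm-leaderboard/details_adamo1139__Yi-34B-200K-AEZAKMI-v2",
    "results",
    split="latest",
)
print(results[0])  # one row holding the aggregated scores for that run
```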
open-llm-leaderboard/details_adamo1139__Yi-34B-200K-AEZAKMI-v2
[ "region:us" ]
2023-12-16T22:03:25+00:00
{"pretty_name": "Evaluation run of adamo1139/Yi-34B-200K-AEZAKMI-v2", "dataset_summary": "Dataset automatically created during the evaluation run of model [adamo1139/Yi-34B-200K-AEZAKMI-v2](https://huggingface.co/adamo1139/Yi-34B-200K-AEZAKMI-v2) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_adamo1139__Yi-34B-200K-AEZAKMI-v2\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-12-16T22:00:38.648825](https://huggingface.co/datasets/open-llm-leaderboard/details_adamo1139__Yi-34B-200K-AEZAKMI-v2/blob/main/results_2023-12-16T22-00-38.648825.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.7472785535366926,\n \"acc_stderr\": 0.028772452169713955,\n \"acc_norm\": 0.7527023693141784,\n \"acc_norm_stderr\": 0.029306663104184946,\n \"mc1\": 0.40024479804161567,\n \"mc1_stderr\": 0.017151605555749138,\n \"mc2\": 0.5674054536094885,\n \"mc2_stderr\": 0.015461253424328927\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.6416382252559727,\n \"acc_stderr\": 0.014012883334859854,\n \"acc_norm\": 0.6791808873720137,\n \"acc_norm_stderr\": 0.01364094309194653\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6630153355905198,\n \"acc_stderr\": 0.00471713572219417,\n \"acc_norm\": 0.8561043616809401,\n \"acc_norm_stderr\": 0.003502665674197166\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.39,\n \"acc_stderr\": 0.04902071300001975,\n \"acc_norm\": 0.39,\n \"acc_norm_stderr\": 0.04902071300001975\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6888888888888889,\n \"acc_stderr\": 0.039992628766177214,\n \"acc_norm\": 0.6888888888888889,\n \"acc_norm_stderr\": 0.039992628766177214\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.8421052631578947,\n \"acc_stderr\": 0.029674167520101456,\n \"acc_norm\": 0.8421052631578947,\n \"acc_norm_stderr\": 0.029674167520101456\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.78,\n \"acc_stderr\": 0.04163331998932262,\n \"acc_norm\": 0.78,\n \"acc_norm_stderr\": 0.04163331998932262\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.8113207547169812,\n \"acc_stderr\": 0.024079995130062253,\n \"acc_norm\": 0.8113207547169812,\n \"acc_norm_stderr\": 0.024079995130062253\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.8611111111111112,\n \"acc_stderr\": 0.0289198029561349,\n \"acc_norm\": 0.8611111111111112,\n \"acc_norm_stderr\": 0.0289198029561349\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.52,\n \"acc_stderr\": 
0.050211673156867795,\n \"acc_norm\": 0.52,\n \"acc_norm_stderr\": 0.050211673156867795\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.61,\n \"acc_stderr\": 0.04902071300001975,\n \"acc_norm\": 0.61,\n \"acc_norm_stderr\": 0.04902071300001975\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.44,\n \"acc_stderr\": 0.0498887651569859,\n \"acc_norm\": 0.44,\n \"acc_norm_stderr\": 0.0498887651569859\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.7341040462427746,\n \"acc_stderr\": 0.03368762932259432,\n \"acc_norm\": 0.7341040462427746,\n \"acc_norm_stderr\": 0.03368762932259432\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.5294117647058824,\n \"acc_stderr\": 0.049665709039785295,\n \"acc_norm\": 0.5294117647058824,\n \"acc_norm_stderr\": 0.049665709039785295\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.81,\n \"acc_stderr\": 0.039427724440366234,\n \"acc_norm\": 0.81,\n \"acc_norm_stderr\": 0.039427724440366234\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.7702127659574468,\n \"acc_stderr\": 0.02750175294441242,\n \"acc_norm\": 0.7702127659574468,\n \"acc_norm_stderr\": 0.02750175294441242\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.6228070175438597,\n \"acc_stderr\": 0.04559522141958216,\n \"acc_norm\": 0.6228070175438597,\n \"acc_norm_stderr\": 0.04559522141958216\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.7241379310344828,\n \"acc_stderr\": 0.037245636197746325,\n \"acc_norm\": 0.7241379310344828,\n \"acc_norm_stderr\": 0.037245636197746325\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.6798941798941799,\n \"acc_stderr\": 0.024026846392873502,\n \"acc_norm\": 0.6798941798941799,\n \"acc_norm_stderr\": 0.024026846392873502\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.5,\n \"acc_stderr\": 0.04472135954999579,\n \"acc_norm\": 0.5,\n \"acc_norm_stderr\": 0.04472135954999579\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.52,\n \"acc_stderr\": 0.050211673156867795,\n \"acc_norm\": 0.52,\n \"acc_norm_stderr\": 0.050211673156867795\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.9,\n \"acc_stderr\": 0.017066403719657255,\n \"acc_norm\": 0.9,\n \"acc_norm_stderr\": 0.017066403719657255\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.6699507389162561,\n \"acc_stderr\": 0.033085304262282574,\n \"acc_norm\": 0.6699507389162561,\n \"acc_norm_stderr\": 0.033085304262282574\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.76,\n \"acc_stderr\": 0.04292346959909282,\n \"acc_norm\": 0.76,\n \"acc_norm_stderr\": 0.04292346959909282\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.8424242424242424,\n \"acc_stderr\": 0.028450388805284336,\n \"acc_norm\": 0.8424242424242424,\n \"acc_norm_stderr\": 0.028450388805284336\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.9191919191919192,\n \"acc_stderr\": 0.019417681889724536,\n \"acc_norm\": 0.9191919191919192,\n \"acc_norm_stderr\": 0.019417681889724536\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.9637305699481865,\n \"acc_stderr\": 0.013492659751295153,\n \"acc_norm\": 0.9637305699481865,\n \"acc_norm_stderr\": 0.013492659751295153\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.782051282051282,\n 
\"acc_stderr\": 0.02093244577446319,\n \"acc_norm\": 0.782051282051282,\n \"acc_norm_stderr\": 0.02093244577446319\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.3888888888888889,\n \"acc_stderr\": 0.029723278961476664,\n \"acc_norm\": 0.3888888888888889,\n \"acc_norm_stderr\": 0.029723278961476664\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.8235294117647058,\n \"acc_stderr\": 0.02476290267805791,\n \"acc_norm\": 0.8235294117647058,\n \"acc_norm_stderr\": 0.02476290267805791\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.4966887417218543,\n \"acc_stderr\": 0.04082393379449654,\n \"acc_norm\": 0.4966887417218543,\n \"acc_norm_stderr\": 0.04082393379449654\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.9174311926605505,\n \"acc_stderr\": 0.01180036136301657,\n \"acc_norm\": 0.9174311926605505,\n \"acc_norm_stderr\": 0.01180036136301657\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.6388888888888888,\n \"acc_stderr\": 0.032757734861009996,\n \"acc_norm\": 0.6388888888888888,\n \"acc_norm_stderr\": 0.032757734861009996\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.9166666666666666,\n \"acc_stderr\": 0.019398452135813905,\n \"acc_norm\": 0.9166666666666666,\n \"acc_norm_stderr\": 0.019398452135813905\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.9071729957805907,\n \"acc_stderr\": 0.018889750550956715,\n \"acc_norm\": 0.9071729957805907,\n \"acc_norm_stderr\": 0.018889750550956715\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.7847533632286996,\n \"acc_stderr\": 0.027584066602208274,\n \"acc_norm\": 0.7847533632286996,\n \"acc_norm_stderr\": 0.027584066602208274\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.8702290076335878,\n \"acc_stderr\": 0.029473649496907065,\n \"acc_norm\": 0.8702290076335878,\n \"acc_norm_stderr\": 0.029473649496907065\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.8677685950413223,\n \"acc_stderr\": 0.0309227883204458,\n \"acc_norm\": 0.8677685950413223,\n \"acc_norm_stderr\": 0.0309227883204458\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.8703703703703703,\n \"acc_stderr\": 0.03247224389917948,\n \"acc_norm\": 0.8703703703703703,\n \"acc_norm_stderr\": 0.03247224389917948\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.8895705521472392,\n \"acc_stderr\": 0.024624937788941318,\n \"acc_norm\": 0.8895705521472392,\n \"acc_norm_stderr\": 0.024624937788941318\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.5357142857142857,\n \"acc_stderr\": 0.04733667890053756,\n \"acc_norm\": 0.5357142857142857,\n \"acc_norm_stderr\": 0.04733667890053756\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.8932038834951457,\n \"acc_stderr\": 0.030581088928331362,\n \"acc_norm\": 0.8932038834951457,\n \"acc_norm_stderr\": 0.030581088928331362\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.9358974358974359,\n \"acc_stderr\": 0.016046261631673137,\n \"acc_norm\": 0.9358974358974359,\n \"acc_norm_stderr\": 0.016046261631673137\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.87,\n \"acc_stderr\": 0.03379976689896309,\n \"acc_norm\": 0.87,\n \"acc_norm_stderr\": 0.03379976689896309\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.913154533844189,\n \"acc_stderr\": 0.010070298377747786,\n \"acc_norm\": 0.913154533844189,\n 
\"acc_norm_stderr\": 0.010070298377747786\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.8265895953757225,\n \"acc_stderr\": 0.020383229551135033,\n \"acc_norm\": 0.8265895953757225,\n \"acc_norm_stderr\": 0.020383229551135033\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.7273743016759776,\n \"acc_stderr\": 0.014893391735249603,\n \"acc_norm\": 0.7273743016759776,\n \"acc_norm_stderr\": 0.014893391735249603\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.8496732026143791,\n \"acc_stderr\": 0.020464175124332618,\n \"acc_norm\": 0.8496732026143791,\n \"acc_norm_stderr\": 0.020464175124332618\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.797427652733119,\n \"acc_stderr\": 0.02282731749105969,\n \"acc_norm\": 0.797427652733119,\n \"acc_norm_stderr\": 0.02282731749105969\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.8487654320987654,\n \"acc_stderr\": 0.019935086092149886,\n \"acc_norm\": 0.8487654320987654,\n \"acc_norm_stderr\": 0.019935086092149886\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.6205673758865248,\n \"acc_stderr\": 0.028947338851614098,\n \"acc_norm\": 0.6205673758865248,\n \"acc_norm_stderr\": 0.028947338851614098\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.5853976531942634,\n \"acc_stderr\": 0.012582597058908284,\n \"acc_norm\": 0.5853976531942634,\n \"acc_norm_stderr\": 0.012582597058908284\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.8235294117647058,\n \"acc_stderr\": 0.023157468308559328,\n \"acc_norm\": 0.8235294117647058,\n \"acc_norm_stderr\": 0.023157468308559328\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.803921568627451,\n \"acc_stderr\": 0.016062056421968635,\n \"acc_norm\": 0.803921568627451,\n \"acc_norm_stderr\": 0.016062056421968635\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.7363636363636363,\n \"acc_stderr\": 0.04220224692971987,\n \"acc_norm\": 0.7363636363636363,\n \"acc_norm_stderr\": 0.04220224692971987\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.8285714285714286,\n \"acc_stderr\": 0.02412746346265016,\n \"acc_norm\": 0.8285714285714286,\n \"acc_norm_stderr\": 0.02412746346265016\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8855721393034826,\n \"acc_stderr\": 0.022509345325101716,\n \"acc_norm\": 0.8855721393034826,\n \"acc_norm_stderr\": 0.022509345325101716\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.9,\n \"acc_stderr\": 0.030151134457776334,\n \"acc_norm\": 0.9,\n \"acc_norm_stderr\": 0.030151134457776334\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5602409638554217,\n \"acc_stderr\": 0.03864139923699122,\n \"acc_norm\": 0.5602409638554217,\n \"acc_norm_stderr\": 0.03864139923699122\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8888888888888888,\n \"acc_stderr\": 0.02410338420207286,\n \"acc_norm\": 0.8888888888888888,\n \"acc_norm_stderr\": 0.02410338420207286\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.40024479804161567,\n \"mc1_stderr\": 0.017151605555749138,\n \"mc2\": 0.5674054536094885,\n \"mc2_stderr\": 0.015461253424328927\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.8161010260457774,\n \"acc_stderr\": 0.01088791601330589\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.5890826383623957,\n \"acc_stderr\": 0.013552132901423215\n }\n}\n```", "repo_url": "https://huggingface.co/adamo1139/Yi-34B-200K-AEZAKMI-v2", 
"leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_12_16T22_00_38.648825", "path": ["**/details_harness|arc:challenge|25_2023-12-16T22-00-38.648825.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-12-16T22-00-38.648825.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_12_16T22_00_38.648825", "path": ["**/details_harness|gsm8k|5_2023-12-16T22-00-38.648825.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-12-16T22-00-38.648825.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_12_16T22_00_38.648825", "path": ["**/details_harness|hellaswag|10_2023-12-16T22-00-38.648825.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-12-16T22-00-38.648825.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_12_16T22_00_38.648825", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T22-00-38.648825.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-12-16T22-00-38.648825.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-12-16T22-00-38.648825.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T22-00-38.648825.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T22-00-38.648825.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-12-16T22-00-38.648825.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T22-00-38.648825.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T22-00-38.648825.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T22-00-38.648825.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T22-00-38.648825.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-12-16T22-00-38.648825.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-12-16T22-00-38.648825.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T22-00-38.648825.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-12-16T22-00-38.648825.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T22-00-38.648825.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T22-00-38.648825.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T22-00-38.648825.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-12-16T22-00-38.648825.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T22-00-38.648825.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T22-00-38.648825.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T22-00-38.648825.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T22-00-38.648825.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T22-00-38.648825.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T22-00-38.648825.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T22-00-38.648825.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T22-00-38.648825.parquet", 
"**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T22-00-38.648825.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T22-00-38.648825.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T22-00-38.648825.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T22-00-38.648825.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T22-00-38.648825.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T22-00-38.648825.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-12-16T22-00-38.648825.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T22-00-38.648825.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-12-16T22-00-38.648825.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T22-00-38.648825.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T22-00-38.648825.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T22-00-38.648825.parquet", "**/details_harness|hendrycksTest-management|5_2023-12-16T22-00-38.648825.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-12-16T22-00-38.648825.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T22-00-38.648825.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T22-00-38.648825.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T22-00-38.648825.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T22-00-38.648825.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-12-16T22-00-38.648825.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-12-16T22-00-38.648825.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-12-16T22-00-38.648825.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T22-00-38.648825.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-12-16T22-00-38.648825.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T22-00-38.648825.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T22-00-38.648825.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-12-16T22-00-38.648825.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-12-16T22-00-38.648825.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-12-16T22-00-38.648825.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T22-00-38.648825.parquet", "**/details_harness|hendrycksTest-virology|5_2023-12-16T22-00-38.648825.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-12-16T22-00-38.648825.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T22-00-38.648825.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-12-16T22-00-38.648825.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-12-16T22-00-38.648825.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T22-00-38.648825.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T22-00-38.648825.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-12-16T22-00-38.648825.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T22-00-38.648825.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T22-00-38.648825.parquet", 
"**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T22-00-38.648825.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T22-00-38.648825.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-12-16T22-00-38.648825.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-12-16T22-00-38.648825.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T22-00-38.648825.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-12-16T22-00-38.648825.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T22-00-38.648825.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T22-00-38.648825.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T22-00-38.648825.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-12-16T22-00-38.648825.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T22-00-38.648825.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T22-00-38.648825.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T22-00-38.648825.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T22-00-38.648825.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T22-00-38.648825.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T22-00-38.648825.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T22-00-38.648825.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T22-00-38.648825.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T22-00-38.648825.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T22-00-38.648825.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T22-00-38.648825.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T22-00-38.648825.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T22-00-38.648825.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T22-00-38.648825.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-12-16T22-00-38.648825.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T22-00-38.648825.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-12-16T22-00-38.648825.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T22-00-38.648825.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T22-00-38.648825.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T22-00-38.648825.parquet", "**/details_harness|hendrycksTest-management|5_2023-12-16T22-00-38.648825.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-12-16T22-00-38.648825.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T22-00-38.648825.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T22-00-38.648825.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T22-00-38.648825.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T22-00-38.648825.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-12-16T22-00-38.648825.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-12-16T22-00-38.648825.parquet", 
"**/details_harness|hendrycksTest-prehistory|5_2023-12-16T22-00-38.648825.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T22-00-38.648825.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-12-16T22-00-38.648825.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T22-00-38.648825.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T22-00-38.648825.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-12-16T22-00-38.648825.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-12-16T22-00-38.648825.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-12-16T22-00-38.648825.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T22-00-38.648825.parquet", "**/details_harness|hendrycksTest-virology|5_2023-12-16T22-00-38.648825.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-12-16T22-00-38.648825.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_12_16T22_00_38.648825", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T22-00-38.648825.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T22-00-38.648825.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_12_16T22_00_38.648825", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-12-16T22-00-38.648825.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-12-16T22-00-38.648825.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_12_16T22_00_38.648825", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-12-16T22-00-38.648825.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-12-16T22-00-38.648825.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_12_16T22_00_38.648825", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T22-00-38.648825.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T22-00-38.648825.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_12_16T22_00_38.648825", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T22-00-38.648825.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T22-00-38.648825.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_12_16T22_00_38.648825", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-12-16T22-00-38.648825.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-12-16T22-00-38.648825.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_12_16T22_00_38.648825", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T22-00-38.648825.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T22-00-38.648825.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_12_16T22_00_38.648825", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T22-00-38.648825.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T22-00-38.648825.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_12_16T22_00_38.648825", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T22-00-38.648825.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T22-00-38.648825.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_12_16T22_00_38.648825", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T22-00-38.648825.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T22-00-38.648825.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_12_16T22_00_38.648825", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-12-16T22-00-38.648825.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-12-16T22-00-38.648825.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_12_16T22_00_38.648825", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-12-16T22-00-38.648825.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-12-16T22-00-38.648825.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_12_16T22_00_38.648825", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T22-00-38.648825.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T22-00-38.648825.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_12_16T22_00_38.648825", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-12-16T22-00-38.648825.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-12-16T22-00-38.648825.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_12_16T22_00_38.648825", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T22-00-38.648825.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T22-00-38.648825.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_12_16T22_00_38.648825", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T22-00-38.648825.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T22-00-38.648825.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_12_16T22_00_38.648825", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T22-00-38.648825.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T22-00-38.648825.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_12_16T22_00_38.648825", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-12-16T22-00-38.648825.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-12-16T22-00-38.648825.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_12_16T22_00_38.648825", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T22-00-38.648825.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T22-00-38.648825.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_12_16T22_00_38.648825", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T22-00-38.648825.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T22-00-38.648825.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_12_16T22_00_38.648825", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T22-00-38.648825.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T22-00-38.648825.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_12_16T22_00_38.648825", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T22-00-38.648825.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T22-00-38.648825.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_12_16T22_00_38.648825", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T22-00-38.648825.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T22-00-38.648825.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_12_16T22_00_38.648825", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T22-00-38.648825.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T22-00-38.648825.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_12_16T22_00_38.648825", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T22-00-38.648825.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T22-00-38.648825.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_12_16T22_00_38.648825", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T22-00-38.648825.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T22-00-38.648825.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_12_16T22_00_38.648825", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T22-00-38.648825.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T22-00-38.648825.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_12_16T22_00_38.648825", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T22-00-38.648825.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T22-00-38.648825.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_12_16T22_00_38.648825", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T22-00-38.648825.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T22-00-38.648825.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_12_16T22_00_38.648825", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T22-00-38.648825.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T22-00-38.648825.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_12_16T22_00_38.648825", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T22-00-38.648825.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T22-00-38.648825.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_12_16T22_00_38.648825", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T22-00-38.648825.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T22-00-38.648825.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_12_16T22_00_38.648825", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-12-16T22-00-38.648825.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-12-16T22-00-38.648825.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_12_16T22_00_38.648825", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T22-00-38.648825.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T22-00-38.648825.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_12_16T22_00_38.648825", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-12-16T22-00-38.648825.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-12-16T22-00-38.648825.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_12_16T22_00_38.648825", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T22-00-38.648825.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T22-00-38.648825.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_12_16T22_00_38.648825", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T22-00-38.648825.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T22-00-38.648825.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_12_16T22_00_38.648825", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T22-00-38.648825.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T22-00-38.648825.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_12_16T22_00_38.648825", "path": ["**/details_harness|hendrycksTest-management|5_2023-12-16T22-00-38.648825.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-12-16T22-00-38.648825.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_12_16T22_00_38.648825", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-12-16T22-00-38.648825.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-12-16T22-00-38.648825.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_12_16T22_00_38.648825", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T22-00-38.648825.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T22-00-38.648825.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_12_16T22_00_38.648825", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T22-00-38.648825.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T22-00-38.648825.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_12_16T22_00_38.648825", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T22-00-38.648825.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T22-00-38.648825.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_12_16T22_00_38.648825", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T22-00-38.648825.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T22-00-38.648825.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_12_16T22_00_38.648825", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-12-16T22-00-38.648825.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-12-16T22-00-38.648825.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_12_16T22_00_38.648825", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-12-16T22-00-38.648825.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-12-16T22-00-38.648825.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_12_16T22_00_38.648825", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-12-16T22-00-38.648825.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-12-16T22-00-38.648825.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_12_16T22_00_38.648825", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T22-00-38.648825.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T22-00-38.648825.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_12_16T22_00_38.648825", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-12-16T22-00-38.648825.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2023-12-16T22-00-38.648825.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_12_16T22_00_38.648825", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T22-00-38.648825.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T22-00-38.648825.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_12_16T22_00_38.648825", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T22-00-38.648825.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T22-00-38.648825.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_12_16T22_00_38.648825", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-12-16T22-00-38.648825.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-12-16T22-00-38.648825.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_12_16T22_00_38.648825", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-12-16T22-00-38.648825.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-12-16T22-00-38.648825.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_12_16T22_00_38.648825", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-12-16T22-00-38.648825.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-12-16T22-00-38.648825.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_12_16T22_00_38.648825", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T22-00-38.648825.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T22-00-38.648825.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_12_16T22_00_38.648825", "path": ["**/details_harness|hendrycksTest-virology|5_2023-12-16T22-00-38.648825.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-12-16T22-00-38.648825.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_12_16T22_00_38.648825", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-12-16T22-00-38.648825.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-12-16T22-00-38.648825.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_12_16T22_00_38.648825", "path": ["**/details_harness|truthfulqa:mc|0_2023-12-16T22-00-38.648825.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-12-16T22-00-38.648825.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_12_16T22_00_38.648825", "path": ["**/details_harness|winogrande|5_2023-12-16T22-00-38.648825.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-12-16T22-00-38.648825.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_12_16T22_00_38.648825", "path": ["results_2023-12-16T22-00-38.648825.parquet"]}, {"split": "latest", "path": 
["results_2023-12-16T22-00-38.648825.parquet"]}]}]}
2023-12-16T22:04:10+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of adamo1139/Yi-34B-200K-AEZAKMI-v2 Dataset automatically created during the evaluation run of model adamo1139/Yi-34B-200K-AEZAKMI-v2 on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-12-16T22:00:38.648825 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases, and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
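For instance, a minimal sketch of loading one of the evaluated tasks with the `datasets` library; the repository id below is an assumption inferred from the leaderboard's `details_<org>__<model>` naming convention, and `harness_winogrande_5` is one of the 63 per-task configurations listed in the metadata above:

```python
from datasets import load_dataset

# Assumed repo id, following the Open LLM Leaderboard "details" naming convention.
data = load_dataset(
    "open-llm-leaderboard/details_adamo1139__Yi-34B-200K-AEZAKMI-v2",
    "harness_winogrande_5",  # one of the 63 per-task configurations
    split="latest",          # the "latest" split points to the most recent run
)
print(data)
```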
[ "# Dataset Card for Evaluation run of adamo1139/Yi-34B-200K-AEZAKMI-v2\n\n\n\nDataset automatically created during the evaluation run of model adamo1139/Yi-34B-200K-AEZAKMI-v2 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-12-16T22:00:38.648825(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of adamo1139/Yi-34B-200K-AEZAKMI-v2\n\n\n\nDataset automatically created during the evaluation run of model adamo1139/Yi-34B-200K-AEZAKMI-v2 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-12-16T22:00:38.648825(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 195, 67, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of adamo1139/Yi-34B-200K-AEZAKMI-v2\n\n\n\nDataset automatically created during the evaluation run of model adamo1139/Yi-34B-200K-AEZAKMI-v2 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-12-16T22:00:38.648825(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]" ]
1cbd2397d1474644b0e6b20abb08d4ddb2718721
# ObjectNet (ImageNet-1k Overlapping) A webp (lossless) encoded version of [ObjectNet-1.0](https://objectnet.dev/index.html) at original resolution, containing only the images for the 113 classes that overlap with ImageNet-1k classes. ## License / Usage Terms ObjectNet is free to use for both research and commercial applications. The authors own the source images and allow their use under a license derived from Creative Commons Attribution 4.0 with only two additional clauses. 1. **ObjectNet may never be used to tune the parameters of any model.** 2. **Any individual images from ObjectNet may only be posted to the web including their 1 pixel red border**. If you are using ObjectNet, please cite our work, the citation appears at the bottom of this page. Any derivative of ObjectNet must contain attribution as well. ## About ObjectNet What is ObjectNet? * A new kind of vision dataset borrowing the idea of controls from other areas of science. * No training set, only a test set! Put your vision system through its paces. * Collected to intentionally show objects from new viewpoints on new backgrounds. * 50,000 image test set, same as ImageNet, with controls for rotation, background, and viewpoint. * 313 object classes with 113 overlapping ImageNet * Large performance drop, what you can expect from vision systems in the real world! * Robust to fine-tuning and a very difficult transfer learning problem ## Why the Red Borders / How do I recognize if an image is in ObjectNet? As training sets become huge, the risk that test and training sets overlap is serious. We provide ObjectNet with a 2 pixel red border around each image which must be removed before performing inference. The ObjectNet license requires that if you post images from ObjectNet to the web, you include this border. Any time you see an image with a solid 2 pixel red border, that's an indication it's in someone's test set and you should be careful about training on it. Reverse image search will allow you to figure out which test set it is from. NOTE: original ObjectNet PNG files actually have a 2 pixel red border while their descriptions say 1. ## Preprocessing Steps for This timm Version 1. Re-encode PNG images with lossless WebP (~32% reduction in size), keeping red border. 2. Add `imagenet_labels` and `imagenet_synsets` consisting of lists of ImageNet-1k classes that overlap with ObjectNet class. 3. Remove all ObjectNet image classes without ImageNet-1k labels. ## Citation ```bibtex @incollection{NIPS2019_9142, title = {ObjectNet: A large-scale bias-controlled dataset for pushing the limits of object recognition models}, author = {Barbu, Andrei and Mayo, David and Alverio, Julian and Luo, William and Wang, Christopher and Gutfreund, Dan and Tenenbaum, Josh and Katz, Boris}, booktitle = {Advances in Neural Information Processing Systems 32}, editor = {H. Wallach and H. Larochelle and A. Beygelzimer and F. d\textquotesingle Alch\'{e}-Buc and E. Fox and R. Garnett}, pages = {9448--9458}, year = {2019}, publisher = {Curran Associates, Inc.}, url = {http://papers.nips.cc/paper/9142-objectnet-a-large-scale-bias-controlled-dataset-for-pushing-the-limits-of-object-recognition-models.pdf} } ```
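As a usage illustration, here is a minimal sketch (not part of the original card) of loading this dataset with the `datasets` library and stripping the red border before inference; the 2 pixel crop follows the note above, and the field names (`image`, `label`, `imagenet_labels`) come from the dataset features listed in the metadata below:

```python
from datasets import load_dataset

# Gated dataset: accept the usage terms on the Hub and authenticate before loading.
ds = load_dataset("timm/objectnet-in1k", split="test")

def strip_red_border(example, border=2):
    # Remove the 2 pixel red border that marks ObjectNet test images before inference.
    img = example["image"]
    w, h = img.size
    example["image"] = img.crop((border, border, w - border, h - border))
    return example

sample = strip_red_border(ds[0])
print(sample["label"], sample["imagenet_labels"])
```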
timm/objectnet-in1k
[ "region:us" ]
2023-12-16T22:20:20+00:00
{"dataset_info": {"features": [{"name": "image", "dtype": "image"}, {"name": "label", "dtype": {"class_label": {"names": {"0": "air_freshener", "1": "alarm_clock", "2": "backpack", "3": "baking_sheet", "4": "banana", "5": "band_aid", "6": "baseball_bat", "7": "baseball_glove", "8": "basket", "9": "bathrobe", "10": "battery", "11": "bed_sheet", "12": "beer_bottle", "13": "beer_can", "14": "belt", "15": "bench", "16": "bicycle", "17": "bike_pump", "18": "bills_money", "19": "binder_closed", "20": "biscuits", "21": "blanket", "22": "blender", "23": "blouse", "24": "board_game", "25": "book_closed", "26": "bookend", "27": "boots", "28": "bottle_cap", "29": "bottle_opener", "30": "bottle_stopper", "31": "box", "32": "bracelet", "33": "bread_knife", "34": "bread_loaf", "35": "briefcase", "36": "brooch", "37": "broom", "38": "bucket", "39": "butchers_knife", "40": "butter", "41": "button", "42": "calendar", "43": "can_opener", "44": "candle", "45": "canned_food", "46": "cd_case", "47": "cellphone", "48": "cellphone_case", "49": "cellphone_charger", "50": "cereal", "51": "chair", "52": "cheese", "53": "chess_piece", "54": "chocolate", "55": "chopstick", "56": "clothes_hamper", "57": "clothes_hanger", "58": "coaster", "59": "coffee_beans", "60": "coffee_french_press", "61": "coffee_grinder", "62": "coffee_machine", "63": "coffee_table", "64": "coin_money", "65": "comb", "66": "combination_lock", "67": "computer_mouse", "68": "contact_lens_case", "69": "cooking_oil_bottle", "70": "cork", "71": "cutting_board", "72": "deodorant", "73": "desk_lamp", "74": "detergent", "75": "dish_soap", "76": "document_folder_closed", "77": "dog_bed", "78": "doormat", "79": "drawer_open", "80": "dress", "81": "dress_pants", "82": "dress_shirt", "83": "dress_shoe_men", "84": "dress_shoe_women", "85": "drill", "86": "drinking_cup", "87": "drinking_straw", "88": "drying_rack_for_clothes", "89": "drying_rack_for_dishes", "90": "dust_pan", "91": "dvd_player", "92": "earbuds", "93": "earring", "94": "egg", "95": "egg_carton", "96": "envelope", "97": "eraser_white_board", "98": "extension_cable", "99": "eyeglasses", "100": "fan", "101": "figurine_or_statue", "102": "first_aid_kit", "103": "flashlight", "104": "floss_container", "105": "flour_container", "106": "fork", "107": "frying_pan", "108": "full_sized_towel", "109": "glue_container", "110": "hair_brush", "111": "hair_dryer", "112": "hairclip", "113": "hairtie", "114": "hammer", "115": "hand_mirror", "116": "hand_towel_or_rag", "117": "handbag", "118": "hat", "119": "headphones_over_ear", "120": "helmet", "121": "honey_container", "122": "ice", "123": "ice_cube_tray", "124": "iron_for_clothes", "125": "ironing_board", "126": "jam", "127": "jar", "128": "jeans", "129": "kettle", "130": "key_chain", "131": "keyboard", "132": "ladle", "133": "lampshade", "134": "laptop_charger", "135": "laptop_open", "136": "leaf", "137": "leggings", "138": "lemon", "139": "letter_opener", "140": "lettuce", "141": "light_bulb", "142": "lighter", "143": "lipstick", "144": "loofah", "145": "magazine", "146": "makeup", "147": "makeup_brush", "148": "marker", "149": "match", "150": "measuring_cup", "151": "microwave", "152": "milk", "153": "mixing_salad_bowl", "154": "monitor", "155": "mouse_pad", "156": "mouthwash", "157": "mug", "158": "multitool", "159": "nail_clippers", "160": "nail_fastener", "161": "nail_file", "162": "nail_polish", "163": "napkin", "164": "necklace", "165": "newspaper", "166": "night_light", "167": "nightstand", "168": "notebook", "169": "notepad", "170": 
"nut_for_screw", "171": "orange", "172": "oven_mitts", "173": "padlock", "174": "paint_can", "175": "paintbrush", "176": "paper", "177": "paper_bag", "178": "paper_plates", "179": "paper_towel", "180": "paperclip", "181": "peeler", "182": "pen", "183": "pencil", "184": "pepper_shaker", "185": "pet_food_container", "186": "phone_landline", "187": "photograph_printed", "188": "pill_bottle", "189": "pill_organizer", "190": "pillow", "191": "pitcher", "192": "placemat", "193": "plastic_bag", "194": "plastic_cup", "195": "plastic_wrap", "196": "plate", "197": "playing_cards", "198": "pliers", "199": "plunger", "200": "pop_can", "201": "portable_heater", "202": "poster", "203": "power_bar", "204": "power_cable", "205": "printer", "206": "raincoat", "207": "rake", "208": "razor", "209": "receipt", "210": "remote_control", "211": "removable_blade", "212": "ribbon", "213": "ring", "214": "rock", "215": "rolling_pin", "216": "ruler", "217": "running_shoe", "218": "safety_pin", "219": "salt_shaker", "220": "sandal", "221": "scarf", "222": "scissors", "223": "screw", "224": "scrub_brush", "225": "sewing_kit", "226": "shampoo_bottle", "227": "shoelace", "228": "shorts", "229": "shovel", "230": "skateboard", "231": "skirt", "232": "sleeping_bag", "233": "slipper", "234": "soap_bar", "235": "soap_dispenser", "236": "sock", "237": "soup_bowl", "238": "spatula", "239": "speaker", "240": "sponge", "241": "spoon", "242": "spray_bottle", "243": "squeegee", "244": "squeeze_bottle", "245": "standing_lamp", "246": "stapler", "247": "step_stool", "248": "still_camera", "249": "stopper_sink_tub", "250": "strainer", "251": "stuffed_animal", "252": "sugar_container", "253": "suit_jacket", "254": "suitcase", "255": "sunglasses", "256": "sweater", "257": "swimming_trunks", "258": "t-shirt", "259": "table_knife", "260": "tablecloth", "261": "tablet_ipad", "262": "tanktop", "263": "tape", "264": "tape_measure", "265": "tarp", "266": "teabag", "267": "teapot", "268": "tennis_racket", "269": "thermometer", "270": "thermos", "271": "throw_pillow", "272": "tie", "273": "tissue", "274": "toaster", "275": "toilet_paper_roll", "276": "tomato", "277": "tongs", "278": "toothbrush", "279": "toothpaste", "280": "tote_bag", "281": "toy", "282": "trash_bag", "283": "trash_bin", "284": "travel_case", "285": "tray", "286": "trophy", "287": "tv", "288": "tweezers", "289": "umbrella", "290": "usb_cable", "291": "usb_flash_drive", "292": "vacuum_cleaner", "293": "vase", "294": "video_camera", "295": "walker", "296": "walking_cane", "297": "wallet", "298": "watch", "299": "water_bottle", "300": "water_filter", "301": "webcam", "302": "weight_exercise", "303": "weight_scale", "304": "wheel", "305": "whisk", "306": "whistle", "307": "wine_bottle", "308": "wine_glass", "309": "winter_glove", "310": "wok", "311": "wrench", "312": "ziploc_bag"}}}}, {"name": "imagenet_labels", "sequence": "int64"}, {"name": "imagenet_synsets", "sequence": "string"}], "splits": [{"name": "test", "num_bytes": 45447504721.55699, "num_examples": 18574}], "download_size": 46256058381, "dataset_size": 45447504721.55699}, "configs": [{"config_name": "default", "data_files": [{"split": "test", "path": "data/test-*"}]}], "extra_gated_prompt": "By clicking on \u201cAccess repository\u201d below, you also agree to ObjectNet Terms: ObjectNet is free to use for both research and commercial applications. The authors own the source images and allow their use under a license derived from Creative Commons Attribution 4.0 with only two additional clauses.\n1. 
ObjectNet may never be used to tune the parameters of any model.\n2. Any individual images from ObjectNet may only be posted to the web including their 1 pixel red border.\nIf you are using ObjectNet, please cite our work, the citation appears at the bottom of this page. Any derivative of ObjectNet must contain attribution as well."}
2023-12-17T00:55:12+00:00
[]
[]
TAGS #region-us
# ObjectNet (ImageNet-1k Overlapping) A webp (lossless) encoded version of ObjectNet-1.0 at original resolution, containing only the images for the 113 classes that overlap with ImageNet-1k classes. ## License / Usage Terms ObjectNet is free to use for both research and commercial applications. The authors own the source images and allow their use under a license derived from Creative Commons Attribution 4.0 with only two additional clauses. 1. ObjectNet may never be used to tune the parameters of any model. 2. Any individual images from ObjectNet may only be posted to the web including their 1 pixel red border. If you are using ObjectNet, please cite our work, the citation appears at the bottom of this page. Any derivative of ObjectNet must contain attribution as well. ## About ObjectNet What is ObjectNet? * A new kind of vision dataset borrowing the idea of controls from other areas of science. * No training set, only a test set! Put your vision system through its paces. * Collected to intentionally show objects from new viewpoints on new backgrounds. * 50,000 image test set, same as ImageNet, with controls for rotation, background, and viewpoint. * 313 object classes with 113 overlapping ImageNet * Large performance drop, what you can expect from vision systems in the real world! * Robust to fine-tuning and a very difficult transfer learning problem ## Why the Red Borders / How do I recognize if an image is in ObjectNet? As training sets become huge, the risk that test and training sets overlap is serious. We provide ObjectNet with a 2 pixel red border around each image which must be removed before performing inference. The ObjectNet license requires that if you post images from ObjectNet to the web, you include this border. Any time you see an image with a solid 2 pixel red border, that's an indication it's in someone's test set and you should be careful about training on it. Reverse image search will allow you to figure out which test set it is from. NOTE: original ObjectNet PNG files actually have a 2 pixel red border while their descriptions say 1. ## Preprocessing Steps for This timm Version 1. Re-encode PNG images with lossless WebP (~32% reduction in size), keeping red border. 2. Add 'imagenet_labels' and 'imagenet_synsets' consisting of lists of ImageNet-1k classes that overlap with ObjectNet class. 3. Remove all ObjectNet image classes without ImageNet-1k labels.
[ "# ObjectNet (ImageNet-1k Overlapping)\n\nA webp (lossless) encoded version of ObjectNet-1.0 at original resolution, containing only the images for the 113 classes that overlap with ImageNet-1k classes.", "## License / Usage Terms\n\nObjectNet is free to use for both research and commercial applications. The authors own the source images and allow their use under a license derived from Creative Commons Attribution 4.0 with only two additional clauses.\n\n1. ObjectNet may never be used to tune the parameters of any model.\n2. Any individual images from ObjectNet may only be posted to the web including their 1 pixel red border.\nIf you are using ObjectNet, please cite our work, the citation appears at the bottom of this page. Any derivative of ObjectNet must contain attribution as well.", "## About ObjectNet\n\nWhat is ObjectNet?\n* A new kind of vision dataset borrowing the idea of controls from other areas of science.\n* No training set, only a test set! Put your vision system through its paces.\n* Collected to intentionally show objects from new viewpoints on new backgrounds.\n* 50,000 image test set, same as ImageNet, with controls for rotation, background, and viewpoint.\n* 313 object classes with 113 overlapping ImageNet\n* Large performance drop, what you can expect from vision systems in the real world!\n* Robust to fine-tuning and a very difficult transfer learning problem", "## Why the Red Borders / How do I recognize if an image is in ObjectNet?\n\nAs training sets become huge, the risk that test and training sets overlap is serious. We provide ObjectNet with a 2 pixel red border around each image which must be removed before performing inference. The ObjectNet license requires that if you post images from ObjectNet to the web, you include this border. Any time you see an image with a solid 2 pixel red border, that's an indication it's in someone's test set and you should be careful about training on it. Reverse image search will allow you to figure out which test set it is from.\n\nNOTE: original ObjectNet PNG files actually have a 2 pixel red border while their descriptions say 1.", "## Preprocessing Steps for This timm Version\n1. Re-encode PNG images with lossless WebP (~32% reduction in size), keeping red border.\n2. Add 'imagenet_labels' and 'imagenet_synsets' consisting of lists of ImageNet-1k classes that overlap with ObjectNet class.\n3. Remove all ObjectNet image classes without ImageNet-1k labels." ]
[ "TAGS\n#region-us \n", "# ObjectNet (ImageNet-1k Overlapping)\n\nA webp (lossless) encoded version of ObjectNet-1.0 at original resolution, containing only the images for the 113 classes that overlap with ImageNet-1k classes.", "## License / Usage Terms\n\nObjectNet is free to use for both research and commercial applications. The authors own the source images and allow their use under a license derived from Creative Commons Attribution 4.0 with only two additional clauses.\n\n1. ObjectNet may never be used to tune the parameters of any model.\n2. Any individual images from ObjectNet may only be posted to the web including their 1 pixel red border.\nIf you are using ObjectNet, please cite our work, the citation appears at the bottom of this page. Any derivative of ObjectNet must contain attribution as well.", "## About ObjectNet\n\nWhat is ObjectNet?\n* A new kind of vision dataset borrowing the idea of controls from other areas of science.\n* No training set, only a test set! Put your vision system through its paces.\n* Collected to intentionally show objects from new viewpoints on new backgrounds.\n* 50,000 image test set, same as ImageNet, with controls for rotation, background, and viewpoint.\n* 313 object classes with 113 overlapping ImageNet\n* Large performance drop, what you can expect from vision systems in the real world!\n* Robust to fine-tuning and a very difficult transfer learning problem", "## Why the Red Borders / How do I recognize if an image is in ObjectNet?\n\nAs training sets become huge, the risk that test and training sets overlap is serious. We provide ObjectNet with a 2 pixel red border around each image which must be removed before performing inference. The ObjectNet license requires that if you post images from ObjectNet to the web, you include this border. Any time you see an image with a solid 2 pixel red border, that's an indication it's in someone's test set and you should be careful about training on it. Reverse image search will allow you to figure out which test set it is from.\n\nNOTE: original ObjectNet PNG files actually have a 2 pixel red border while their descriptions say 1.", "## Preprocessing Steps for This timm Version\n1. Re-encode PNG images with lossless WebP (~32% reduction in size), keeping red border.\n2. Add 'imagenet_labels' and 'imagenet_synsets' consisting of lists of ImageNet-1k classes that overlap with ObjectNet class.\n3. Remove all ObjectNet image classes without ImageNet-1k labels." ]
[ 6, 52, 120, 138, 162, 89 ]
[ "passage: TAGS\n#region-us \n# ObjectNet (ImageNet-1k Overlapping)\n\nA webp (lossless) encoded version of ObjectNet-1.0 at original resolution, containing only the images for the 113 classes that overlap with ImageNet-1k classes.## License / Usage Terms\n\nObjectNet is free to use for both research and commercial applications. The authors own the source images and allow their use under a license derived from Creative Commons Attribution 4.0 with only two additional clauses.\n\n1. ObjectNet may never be used to tune the parameters of any model.\n2. Any individual images from ObjectNet may only be posted to the web including their 1 pixel red border.\nIf you are using ObjectNet, please cite our work, the citation appears at the bottom of this page. Any derivative of ObjectNet must contain attribution as well.## About ObjectNet\n\nWhat is ObjectNet?\n* A new kind of vision dataset borrowing the idea of controls from other areas of science.\n* No training set, only a test set! Put your vision system through its paces.\n* Collected to intentionally show objects from new viewpoints on new backgrounds.\n* 50,000 image test set, same as ImageNet, with controls for rotation, background, and viewpoint.\n* 313 object classes with 113 overlapping ImageNet\n* Large performance drop, what you can expect from vision systems in the real world!\n* Robust to fine-tuning and a very difficult transfer learning problem## Why the Red Borders / How do I recognize if an image is in ObjectNet?\n\nAs training sets become huge, the risk that test and training sets overlap is serious. We provide ObjectNet with a 2 pixel red border around each image which must be removed before performing inference. The ObjectNet license requires that if you post images from ObjectNet to the web, you include this border. Any time you see an image with a solid 2 pixel red border, that's an indication it's in someone's test set and you should be careful about training on it. Reverse image search will allow you to figure out which test set it is from.\n\nNOTE: original ObjectNet PNG files actually have a 2 pixel red border while their descriptions say 1." ]
93149c5c4a76148d009ae2622ade62ffe2ed1e3c
# Dataset Card for "openai_summarize_tldr_rbaseline" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
mnoukhov/openai_summarize_tldr_rbaseline
[ "region:us" ]
2023-12-16T23:27:23+00:00
{"configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "valid", "path": "data/valid-*"}]}], "dataset_info": {"features": [{"name": "prompt", "dtype": "string"}, {"name": "label", "dtype": "string"}, {"name": "reward_baseline", "dtype": "float32"}], "splits": [{"name": "train", "num_bytes": 181611007, "num_examples": 116722}, {"name": "valid", "num_bytes": 3120685, "num_examples": 2000}], "download_size": 113145941, "dataset_size": 184731692}}
2023-12-16T23:27:30+00:00
[]
[]
TAGS #region-us
# Dataset Card for "openai_summarize_tldr_rbaseline" More Information needed
[ "# Dataset Card for \"openai_summarize_tldr_rbaseline\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"openai_summarize_tldr_rbaseline\"\n\nMore Information needed" ]
[ 6, 24 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"openai_summarize_tldr_rbaseline\"\n\nMore Information needed" ]
8ea7fbc4cda1b06bb37d3399b3cb54591886b6c5
# Dataset Card for Evaluation run of Undi95/Llamix2-Xwin-MoE-4x13B <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [Undi95/Llamix2-Xwin-MoE-4x13B](https://huggingface.co/Undi95/Llamix2-Xwin-MoE-4x13B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_Undi95__Llamix2-Xwin-MoE-4x13B", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-12-16T23:47:16.165655](https://huggingface.co/datasets/open-llm-leaderboard/details_Undi95__Llamix2-Xwin-MoE-4x13B/blob/main/results_2023-12-16T23-47-16.165655.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.5629553312475245, "acc_stderr": 0.03336826953256445, "acc_norm": 0.5676375484013418, "acc_norm_stderr": 0.03405257588510401, "mc1": 0.2729498164014688, "mc1_stderr": 0.015594753632006525, "mc2": 0.3963209435327923, "mc2_stderr": 0.014481742388552897 }, "harness|arc:challenge|25": { "acc": 0.5699658703071673, "acc_stderr": 0.014467631559137993, "acc_norm": 0.6040955631399317, "acc_norm_stderr": 0.01429122839353659 }, "harness|hellaswag|10": { "acc": 0.6299541923919538, "acc_stderr": 0.004818298991012551, "acc_norm": 0.8296156144194383, "acc_norm_stderr": 0.0037520176390837532 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.28, "acc_stderr": 0.04512608598542129, "acc_norm": 0.28, "acc_norm_stderr": 0.04512608598542129 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.5259259259259259, "acc_stderr": 0.04313531696750575, "acc_norm": 0.5259259259259259, "acc_norm_stderr": 0.04313531696750575 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.5526315789473685, "acc_stderr": 0.0404633688397825, "acc_norm": 0.5526315789473685, "acc_norm_stderr": 0.0404633688397825 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.55, "acc_stderr": 0.049999999999999996, "acc_norm": 0.55, "acc_norm_stderr": 0.049999999999999996 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.6150943396226415, "acc_stderr": 0.02994649856769995, "acc_norm": 0.6150943396226415, "acc_norm_stderr": 0.02994649856769995 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.6388888888888888, "acc_stderr": 0.040166600304512336, "acc_norm": 0.6388888888888888, "acc_norm_stderr": 0.040166600304512336 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.38, "acc_stderr": 0.04878317312145633, "acc_norm": 0.38, "acc_norm_stderr": 0.04878317312145633 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.48, "acc_stderr": 0.050211673156867795, "acc_norm": 0.48, "acc_norm_stderr": 0.050211673156867795 }, 
"harness|hendrycksTest-college_mathematics|5": { "acc": 0.29, "acc_stderr": 0.045604802157206845, "acc_norm": 0.29, "acc_norm_stderr": 0.045604802157206845 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.5375722543352601, "acc_stderr": 0.0380168510452446, "acc_norm": 0.5375722543352601, "acc_norm_stderr": 0.0380168510452446 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.22549019607843138, "acc_stderr": 0.041583075330832865, "acc_norm": 0.22549019607843138, "acc_norm_stderr": 0.041583075330832865 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.74, "acc_stderr": 0.04408440022768079, "acc_norm": 0.74, "acc_norm_stderr": 0.04408440022768079 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.44680851063829785, "acc_stderr": 0.0325005368436584, "acc_norm": 0.44680851063829785, "acc_norm_stderr": 0.0325005368436584 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.3157894736842105, "acc_stderr": 0.04372748290278007, "acc_norm": 0.3157894736842105, "acc_norm_stderr": 0.04372748290278007 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.496551724137931, "acc_stderr": 0.041665675771015785, "acc_norm": 0.496551724137931, "acc_norm_stderr": 0.041665675771015785 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.31216931216931215, "acc_stderr": 0.0238652068369726, "acc_norm": 0.31216931216931215, "acc_norm_stderr": 0.0238652068369726 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.3412698412698413, "acc_stderr": 0.04240799327574925, "acc_norm": 0.3412698412698413, "acc_norm_stderr": 0.04240799327574925 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.35, "acc_stderr": 0.047937248544110196, "acc_norm": 0.35, "acc_norm_stderr": 0.047937248544110196 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.6741935483870968, "acc_stderr": 0.026662010578567107, "acc_norm": 0.6741935483870968, "acc_norm_stderr": 0.026662010578567107 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.458128078817734, "acc_stderr": 0.03505630140785741, "acc_norm": 0.458128078817734, "acc_norm_stderr": 0.03505630140785741 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.58, "acc_stderr": 0.049604496374885836, "acc_norm": 0.58, "acc_norm_stderr": 0.049604496374885836 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.6727272727272727, "acc_stderr": 0.03663974994391245, "acc_norm": 0.6727272727272727, "acc_norm_stderr": 0.03663974994391245 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.696969696969697, "acc_stderr": 0.03274287914026868, "acc_norm": 0.696969696969697, "acc_norm_stderr": 0.03274287914026868 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.8238341968911918, "acc_stderr": 0.027493504244548057, "acc_norm": 0.8238341968911918, "acc_norm_stderr": 0.027493504244548057 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.49743589743589745, "acc_stderr": 0.025350672979412195, "acc_norm": 0.49743589743589745, "acc_norm_stderr": 0.025350672979412195 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.28888888888888886, "acc_stderr": 0.027634907264178544, "acc_norm": 0.28888888888888886, "acc_norm_stderr": 0.027634907264178544 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.5714285714285714, "acc_stderr": 0.032145368597886394, "acc_norm": 0.5714285714285714, "acc_norm_stderr": 0.032145368597886394 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.31788079470198677, "acc_stderr": 
0.03802039760107903, "acc_norm": 0.31788079470198677, "acc_norm_stderr": 0.03802039760107903 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.7688073394495413, "acc_stderr": 0.018075750241633146, "acc_norm": 0.7688073394495413, "acc_norm_stderr": 0.018075750241633146 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.42592592592592593, "acc_stderr": 0.033723432716530645, "acc_norm": 0.42592592592592593, "acc_norm_stderr": 0.033723432716530645 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.7598039215686274, "acc_stderr": 0.02998373305591361, "acc_norm": 0.7598039215686274, "acc_norm_stderr": 0.02998373305591361 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.7679324894514767, "acc_stderr": 0.027479744550808503, "acc_norm": 0.7679324894514767, "acc_norm_stderr": 0.027479744550808503 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.6547085201793722, "acc_stderr": 0.03191100192835794, "acc_norm": 0.6547085201793722, "acc_norm_stderr": 0.03191100192835794 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.6183206106870229, "acc_stderr": 0.042607351576445594, "acc_norm": 0.6183206106870229, "acc_norm_stderr": 0.042607351576445594 }, "harness|hendrycksTest-international_law|5": { "acc": 0.743801652892562, "acc_stderr": 0.039849796533028725, "acc_norm": 0.743801652892562, "acc_norm_stderr": 0.039849796533028725 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.7222222222222222, "acc_stderr": 0.04330043749650743, "acc_norm": 0.7222222222222222, "acc_norm_stderr": 0.04330043749650743 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.6809815950920245, "acc_stderr": 0.03661997551073836, "acc_norm": 0.6809815950920245, "acc_norm_stderr": 0.03661997551073836 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.33035714285714285, "acc_stderr": 0.04464285714285714, "acc_norm": 0.33035714285714285, "acc_norm_stderr": 0.04464285714285714 }, "harness|hendrycksTest-management|5": { "acc": 0.7378640776699029, "acc_stderr": 0.04354631077260595, "acc_norm": 0.7378640776699029, "acc_norm_stderr": 0.04354631077260595 }, "harness|hendrycksTest-marketing|5": { "acc": 0.811965811965812, "acc_stderr": 0.02559819368665225, "acc_norm": 0.811965811965812, "acc_norm_stderr": 0.02559819368665225 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.55, "acc_stderr": 0.04999999999999999, "acc_norm": 0.55, "acc_norm_stderr": 0.04999999999999999 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.7624521072796935, "acc_stderr": 0.015218733046150195, "acc_norm": 0.7624521072796935, "acc_norm_stderr": 0.015218733046150195 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.653179190751445, "acc_stderr": 0.025624723994030454, "acc_norm": 0.653179190751445, "acc_norm_stderr": 0.025624723994030454 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.3888268156424581, "acc_stderr": 0.01630389953079613, "acc_norm": 0.3888268156424581, "acc_norm_stderr": 0.01630389953079613 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.6274509803921569, "acc_stderr": 0.027684181883302898, "acc_norm": 0.6274509803921569, "acc_norm_stderr": 0.027684181883302898 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.662379421221865, "acc_stderr": 0.026858825879488544, "acc_norm": 0.662379421221865, "acc_norm_stderr": 0.026858825879488544 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.6419753086419753, "acc_stderr": 0.026675611926037106, "acc_norm": 0.6419753086419753, "acc_norm_stderr": 0.026675611926037106 }, 
"harness|hendrycksTest-professional_accounting|5": { "acc": 0.40070921985815605, "acc_stderr": 0.029233465745573083, "acc_norm": 0.40070921985815605, "acc_norm_stderr": 0.029233465745573083 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.4211212516297262, "acc_stderr": 0.012610325733489905, "acc_norm": 0.4211212516297262, "acc_norm_stderr": 0.012610325733489905 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.5294117647058824, "acc_stderr": 0.03032024326500413, "acc_norm": 0.5294117647058824, "acc_norm_stderr": 0.03032024326500413 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.5637254901960784, "acc_stderr": 0.02006287424353913, "acc_norm": 0.5637254901960784, "acc_norm_stderr": 0.02006287424353913 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6272727272727273, "acc_stderr": 0.04631381319425465, "acc_norm": 0.6272727272727273, "acc_norm_stderr": 0.04631381319425465 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.6489795918367347, "acc_stderr": 0.03055531675557364, "acc_norm": 0.6489795918367347, "acc_norm_stderr": 0.03055531675557364 }, "harness|hendrycksTest-sociology|5": { "acc": 0.7711442786069652, "acc_stderr": 0.029705284056772432, "acc_norm": 0.7711442786069652, "acc_norm_stderr": 0.029705284056772432 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.86, "acc_stderr": 0.03487350880197769, "acc_norm": 0.86, "acc_norm_stderr": 0.03487350880197769 }, "harness|hendrycksTest-virology|5": { "acc": 0.4939759036144578, "acc_stderr": 0.03892212195333047, "acc_norm": 0.4939759036144578, "acc_norm_stderr": 0.03892212195333047 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.7719298245614035, "acc_stderr": 0.032180937956023566, "acc_norm": 0.7719298245614035, "acc_norm_stderr": 0.032180937956023566 }, "harness|truthfulqa:mc|0": { "mc1": 0.2729498164014688, "mc1_stderr": 0.015594753632006525, "mc2": 0.3963209435327923, "mc2_stderr": 0.014481742388552897 }, "harness|winogrande|5": { "acc": 0.7513812154696132, "acc_stderr": 0.012147314713403108 }, "harness|gsm8k|5": { "acc": 0.33206974981046244, "acc_stderr": 0.012972465034361873 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. 
news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
open-llm-leaderboard/details_Undi95__Llamix2-Xwin-MoE-4x13B
[ "region:us" ]
2023-12-16T23:50:13+00:00
{"pretty_name": "Evaluation run of Undi95/Llamix2-Xwin-MoE-4x13B", "dataset_summary": "Dataset automatically created during the evaluation run of model [Undi95/Llamix2-Xwin-MoE-4x13B](https://huggingface.co/Undi95/Llamix2-Xwin-MoE-4x13B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_Undi95__Llamix2-Xwin-MoE-4x13B\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-12-16T23:47:16.165655](https://huggingface.co/datasets/open-llm-leaderboard/details_Undi95__Llamix2-Xwin-MoE-4x13B/blob/main/results_2023-12-16T23-47-16.165655.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.5629553312475245,\n \"acc_stderr\": 0.03336826953256445,\n \"acc_norm\": 0.5676375484013418,\n \"acc_norm_stderr\": 0.03405257588510401,\n \"mc1\": 0.2729498164014688,\n \"mc1_stderr\": 0.015594753632006525,\n \"mc2\": 0.3963209435327923,\n \"mc2_stderr\": 0.014481742388552897\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.5699658703071673,\n \"acc_stderr\": 0.014467631559137993,\n \"acc_norm\": 0.6040955631399317,\n \"acc_norm_stderr\": 0.01429122839353659\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6299541923919538,\n \"acc_stderr\": 0.004818298991012551,\n \"acc_norm\": 0.8296156144194383,\n \"acc_norm_stderr\": 0.0037520176390837532\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.28,\n \"acc_stderr\": 0.04512608598542129,\n \"acc_norm\": 0.28,\n \"acc_norm_stderr\": 0.04512608598542129\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.5259259259259259,\n \"acc_stderr\": 0.04313531696750575,\n \"acc_norm\": 0.5259259259259259,\n \"acc_norm_stderr\": 0.04313531696750575\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.5526315789473685,\n \"acc_stderr\": 0.0404633688397825,\n \"acc_norm\": 0.5526315789473685,\n \"acc_norm_stderr\": 0.0404633688397825\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.55,\n \"acc_stderr\": 0.049999999999999996,\n \"acc_norm\": 0.55,\n \"acc_norm_stderr\": 0.049999999999999996\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.6150943396226415,\n \"acc_stderr\": 0.02994649856769995,\n \"acc_norm\": 0.6150943396226415,\n \"acc_norm_stderr\": 0.02994649856769995\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.6388888888888888,\n \"acc_stderr\": 0.040166600304512336,\n \"acc_norm\": 0.6388888888888888,\n \"acc_norm_stderr\": 0.040166600304512336\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.38,\n \"acc_stderr\": 0.04878317312145633,\n 
\"acc_norm\": 0.38,\n \"acc_norm_stderr\": 0.04878317312145633\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.48,\n \"acc_stderr\": 0.050211673156867795,\n \"acc_norm\": 0.48,\n \"acc_norm_stderr\": 0.050211673156867795\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.29,\n \"acc_stderr\": 0.045604802157206845,\n \"acc_norm\": 0.29,\n \"acc_norm_stderr\": 0.045604802157206845\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.5375722543352601,\n \"acc_stderr\": 0.0380168510452446,\n \"acc_norm\": 0.5375722543352601,\n \"acc_norm_stderr\": 0.0380168510452446\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.22549019607843138,\n \"acc_stderr\": 0.041583075330832865,\n \"acc_norm\": 0.22549019607843138,\n \"acc_norm_stderr\": 0.041583075330832865\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.74,\n \"acc_stderr\": 0.04408440022768079,\n \"acc_norm\": 0.74,\n \"acc_norm_stderr\": 0.04408440022768079\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.44680851063829785,\n \"acc_stderr\": 0.0325005368436584,\n \"acc_norm\": 0.44680851063829785,\n \"acc_norm_stderr\": 0.0325005368436584\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.3157894736842105,\n \"acc_stderr\": 0.04372748290278007,\n \"acc_norm\": 0.3157894736842105,\n \"acc_norm_stderr\": 0.04372748290278007\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.496551724137931,\n \"acc_stderr\": 0.041665675771015785,\n \"acc_norm\": 0.496551724137931,\n \"acc_norm_stderr\": 0.041665675771015785\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.31216931216931215,\n \"acc_stderr\": 0.0238652068369726,\n \"acc_norm\": 0.31216931216931215,\n \"acc_norm_stderr\": 0.0238652068369726\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.3412698412698413,\n \"acc_stderr\": 0.04240799327574925,\n \"acc_norm\": 0.3412698412698413,\n \"acc_norm_stderr\": 0.04240799327574925\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.35,\n \"acc_stderr\": 0.047937248544110196,\n \"acc_norm\": 0.35,\n \"acc_norm_stderr\": 0.047937248544110196\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.6741935483870968,\n \"acc_stderr\": 0.026662010578567107,\n \"acc_norm\": 0.6741935483870968,\n \"acc_norm_stderr\": 0.026662010578567107\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.458128078817734,\n \"acc_stderr\": 0.03505630140785741,\n \"acc_norm\": 0.458128078817734,\n \"acc_norm_stderr\": 0.03505630140785741\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.58,\n \"acc_stderr\": 0.049604496374885836,\n \"acc_norm\": 0.58,\n \"acc_norm_stderr\": 0.049604496374885836\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.6727272727272727,\n \"acc_stderr\": 0.03663974994391245,\n \"acc_norm\": 0.6727272727272727,\n \"acc_norm_stderr\": 0.03663974994391245\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.696969696969697,\n \"acc_stderr\": 0.03274287914026868,\n \"acc_norm\": 0.696969696969697,\n \"acc_norm_stderr\": 0.03274287914026868\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.8238341968911918,\n \"acc_stderr\": 0.027493504244548057,\n \"acc_norm\": 0.8238341968911918,\n \"acc_norm_stderr\": 0.027493504244548057\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 
0.49743589743589745,\n \"acc_stderr\": 0.025350672979412195,\n \"acc_norm\": 0.49743589743589745,\n \"acc_norm_stderr\": 0.025350672979412195\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.28888888888888886,\n \"acc_stderr\": 0.027634907264178544,\n \"acc_norm\": 0.28888888888888886,\n \"acc_norm_stderr\": 0.027634907264178544\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.5714285714285714,\n \"acc_stderr\": 0.032145368597886394,\n \"acc_norm\": 0.5714285714285714,\n \"acc_norm_stderr\": 0.032145368597886394\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.31788079470198677,\n \"acc_stderr\": 0.03802039760107903,\n \"acc_norm\": 0.31788079470198677,\n \"acc_norm_stderr\": 0.03802039760107903\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.7688073394495413,\n \"acc_stderr\": 0.018075750241633146,\n \"acc_norm\": 0.7688073394495413,\n \"acc_norm_stderr\": 0.018075750241633146\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.42592592592592593,\n \"acc_stderr\": 0.033723432716530645,\n \"acc_norm\": 0.42592592592592593,\n \"acc_norm_stderr\": 0.033723432716530645\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.7598039215686274,\n \"acc_stderr\": 0.02998373305591361,\n \"acc_norm\": 0.7598039215686274,\n \"acc_norm_stderr\": 0.02998373305591361\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.7679324894514767,\n \"acc_stderr\": 0.027479744550808503,\n \"acc_norm\": 0.7679324894514767,\n \"acc_norm_stderr\": 0.027479744550808503\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6547085201793722,\n \"acc_stderr\": 0.03191100192835794,\n \"acc_norm\": 0.6547085201793722,\n \"acc_norm_stderr\": 0.03191100192835794\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.6183206106870229,\n \"acc_stderr\": 0.042607351576445594,\n \"acc_norm\": 0.6183206106870229,\n \"acc_norm_stderr\": 0.042607351576445594\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.743801652892562,\n \"acc_stderr\": 0.039849796533028725,\n \"acc_norm\": 0.743801652892562,\n \"acc_norm_stderr\": 0.039849796533028725\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7222222222222222,\n \"acc_stderr\": 0.04330043749650743,\n \"acc_norm\": 0.7222222222222222,\n \"acc_norm_stderr\": 0.04330043749650743\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.6809815950920245,\n \"acc_stderr\": 0.03661997551073836,\n \"acc_norm\": 0.6809815950920245,\n \"acc_norm_stderr\": 0.03661997551073836\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.33035714285714285,\n \"acc_stderr\": 0.04464285714285714,\n \"acc_norm\": 0.33035714285714285,\n \"acc_norm_stderr\": 0.04464285714285714\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7378640776699029,\n \"acc_stderr\": 0.04354631077260595,\n \"acc_norm\": 0.7378640776699029,\n \"acc_norm_stderr\": 0.04354631077260595\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.811965811965812,\n \"acc_stderr\": 0.02559819368665225,\n \"acc_norm\": 0.811965811965812,\n \"acc_norm_stderr\": 0.02559819368665225\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.55,\n \"acc_stderr\": 0.04999999999999999,\n \"acc_norm\": 0.55,\n \"acc_norm_stderr\": 0.04999999999999999\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.7624521072796935,\n \"acc_stderr\": 0.015218733046150195,\n 
\"acc_norm\": 0.7624521072796935,\n \"acc_norm_stderr\": 0.015218733046150195\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.653179190751445,\n \"acc_stderr\": 0.025624723994030454,\n \"acc_norm\": 0.653179190751445,\n \"acc_norm_stderr\": 0.025624723994030454\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.3888268156424581,\n \"acc_stderr\": 0.01630389953079613,\n \"acc_norm\": 0.3888268156424581,\n \"acc_norm_stderr\": 0.01630389953079613\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.6274509803921569,\n \"acc_stderr\": 0.027684181883302898,\n \"acc_norm\": 0.6274509803921569,\n \"acc_norm_stderr\": 0.027684181883302898\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.662379421221865,\n \"acc_stderr\": 0.026858825879488544,\n \"acc_norm\": 0.662379421221865,\n \"acc_norm_stderr\": 0.026858825879488544\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.6419753086419753,\n \"acc_stderr\": 0.026675611926037106,\n \"acc_norm\": 0.6419753086419753,\n \"acc_norm_stderr\": 0.026675611926037106\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.40070921985815605,\n \"acc_stderr\": 0.029233465745573083,\n \"acc_norm\": 0.40070921985815605,\n \"acc_norm_stderr\": 0.029233465745573083\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.4211212516297262,\n \"acc_stderr\": 0.012610325733489905,\n \"acc_norm\": 0.4211212516297262,\n \"acc_norm_stderr\": 0.012610325733489905\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.5294117647058824,\n \"acc_stderr\": 0.03032024326500413,\n \"acc_norm\": 0.5294117647058824,\n \"acc_norm_stderr\": 0.03032024326500413\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.5637254901960784,\n \"acc_stderr\": 0.02006287424353913,\n \"acc_norm\": 0.5637254901960784,\n \"acc_norm_stderr\": 0.02006287424353913\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6272727272727273,\n \"acc_stderr\": 0.04631381319425465,\n \"acc_norm\": 0.6272727272727273,\n \"acc_norm_stderr\": 0.04631381319425465\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.6489795918367347,\n \"acc_stderr\": 0.03055531675557364,\n \"acc_norm\": 0.6489795918367347,\n \"acc_norm_stderr\": 0.03055531675557364\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.7711442786069652,\n \"acc_stderr\": 0.029705284056772432,\n \"acc_norm\": 0.7711442786069652,\n \"acc_norm_stderr\": 0.029705284056772432\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.86,\n \"acc_stderr\": 0.03487350880197769,\n \"acc_norm\": 0.86,\n \"acc_norm_stderr\": 0.03487350880197769\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.4939759036144578,\n \"acc_stderr\": 0.03892212195333047,\n \"acc_norm\": 0.4939759036144578,\n \"acc_norm_stderr\": 0.03892212195333047\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.7719298245614035,\n \"acc_stderr\": 0.032180937956023566,\n \"acc_norm\": 0.7719298245614035,\n \"acc_norm_stderr\": 0.032180937956023566\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.2729498164014688,\n \"mc1_stderr\": 0.015594753632006525,\n \"mc2\": 0.3963209435327923,\n \"mc2_stderr\": 0.014481742388552897\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7513812154696132,\n \"acc_stderr\": 0.012147314713403108\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.33206974981046244,\n \"acc_stderr\": 0.012972465034361873\n }\n}\n```", "repo_url": 
"https://huggingface.co/Undi95/Llamix2-Xwin-MoE-4x13B", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_12_16T23_47_16.165655", "path": ["**/details_harness|arc:challenge|25_2023-12-16T23-47-16.165655.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-12-16T23-47-16.165655.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_12_16T23_47_16.165655", "path": ["**/details_harness|gsm8k|5_2023-12-16T23-47-16.165655.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-12-16T23-47-16.165655.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_12_16T23_47_16.165655", "path": ["**/details_harness|hellaswag|10_2023-12-16T23-47-16.165655.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-12-16T23-47-16.165655.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_12_16T23_47_16.165655", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T23-47-16.165655.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-12-16T23-47-16.165655.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-12-16T23-47-16.165655.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T23-47-16.165655.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T23-47-16.165655.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-12-16T23-47-16.165655.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T23-47-16.165655.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T23-47-16.165655.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T23-47-16.165655.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T23-47-16.165655.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-12-16T23-47-16.165655.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-12-16T23-47-16.165655.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T23-47-16.165655.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-12-16T23-47-16.165655.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T23-47-16.165655.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T23-47-16.165655.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T23-47-16.165655.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-12-16T23-47-16.165655.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T23-47-16.165655.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T23-47-16.165655.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T23-47-16.165655.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T23-47-16.165655.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T23-47-16.165655.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T23-47-16.165655.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T23-47-16.165655.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T23-47-16.165655.parquet", 
"**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T23-47-16.165655.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T23-47-16.165655.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T23-47-16.165655.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T23-47-16.165655.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T23-47-16.165655.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T23-47-16.165655.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-12-16T23-47-16.165655.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T23-47-16.165655.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-12-16T23-47-16.165655.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T23-47-16.165655.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T23-47-16.165655.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T23-47-16.165655.parquet", "**/details_harness|hendrycksTest-management|5_2023-12-16T23-47-16.165655.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-12-16T23-47-16.165655.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T23-47-16.165655.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T23-47-16.165655.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T23-47-16.165655.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T23-47-16.165655.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-12-16T23-47-16.165655.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-12-16T23-47-16.165655.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-12-16T23-47-16.165655.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T23-47-16.165655.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-12-16T23-47-16.165655.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T23-47-16.165655.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T23-47-16.165655.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-12-16T23-47-16.165655.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-12-16T23-47-16.165655.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-12-16T23-47-16.165655.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T23-47-16.165655.parquet", "**/details_harness|hendrycksTest-virology|5_2023-12-16T23-47-16.165655.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-12-16T23-47-16.165655.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T23-47-16.165655.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-12-16T23-47-16.165655.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-12-16T23-47-16.165655.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T23-47-16.165655.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T23-47-16.165655.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-12-16T23-47-16.165655.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T23-47-16.165655.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T23-47-16.165655.parquet", 
"**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T23-47-16.165655.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T23-47-16.165655.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-12-16T23-47-16.165655.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-12-16T23-47-16.165655.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T23-47-16.165655.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-12-16T23-47-16.165655.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T23-47-16.165655.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T23-47-16.165655.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T23-47-16.165655.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-12-16T23-47-16.165655.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T23-47-16.165655.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T23-47-16.165655.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T23-47-16.165655.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T23-47-16.165655.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T23-47-16.165655.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T23-47-16.165655.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T23-47-16.165655.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T23-47-16.165655.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T23-47-16.165655.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T23-47-16.165655.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T23-47-16.165655.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T23-47-16.165655.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T23-47-16.165655.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T23-47-16.165655.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-12-16T23-47-16.165655.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T23-47-16.165655.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-12-16T23-47-16.165655.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T23-47-16.165655.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T23-47-16.165655.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T23-47-16.165655.parquet", "**/details_harness|hendrycksTest-management|5_2023-12-16T23-47-16.165655.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-12-16T23-47-16.165655.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T23-47-16.165655.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T23-47-16.165655.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T23-47-16.165655.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T23-47-16.165655.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-12-16T23-47-16.165655.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-12-16T23-47-16.165655.parquet", 
"**/details_harness|hendrycksTest-prehistory|5_2023-12-16T23-47-16.165655.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T23-47-16.165655.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-12-16T23-47-16.165655.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T23-47-16.165655.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T23-47-16.165655.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-12-16T23-47-16.165655.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-12-16T23-47-16.165655.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-12-16T23-47-16.165655.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T23-47-16.165655.parquet", "**/details_harness|hendrycksTest-virology|5_2023-12-16T23-47-16.165655.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-12-16T23-47-16.165655.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_12_16T23_47_16.165655", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T23-47-16.165655.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T23-47-16.165655.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_12_16T23_47_16.165655", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-12-16T23-47-16.165655.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-12-16T23-47-16.165655.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_12_16T23_47_16.165655", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-12-16T23-47-16.165655.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-12-16T23-47-16.165655.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_12_16T23_47_16.165655", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T23-47-16.165655.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T23-47-16.165655.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_12_16T23_47_16.165655", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T23-47-16.165655.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T23-47-16.165655.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_12_16T23_47_16.165655", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-12-16T23-47-16.165655.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-12-16T23-47-16.165655.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_12_16T23_47_16.165655", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T23-47-16.165655.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T23-47-16.165655.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_12_16T23_47_16.165655", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T23-47-16.165655.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T23-47-16.165655.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_12_16T23_47_16.165655", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T23-47-16.165655.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T23-47-16.165655.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_12_16T23_47_16.165655", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T23-47-16.165655.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T23-47-16.165655.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_12_16T23_47_16.165655", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-12-16T23-47-16.165655.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-12-16T23-47-16.165655.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_12_16T23_47_16.165655", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-12-16T23-47-16.165655.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-12-16T23-47-16.165655.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_12_16T23_47_16.165655", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T23-47-16.165655.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T23-47-16.165655.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_12_16T23_47_16.165655", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-12-16T23-47-16.165655.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-12-16T23-47-16.165655.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_12_16T23_47_16.165655", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T23-47-16.165655.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T23-47-16.165655.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_12_16T23_47_16.165655", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T23-47-16.165655.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T23-47-16.165655.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_12_16T23_47_16.165655", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T23-47-16.165655.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T23-47-16.165655.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_12_16T23_47_16.165655", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-12-16T23-47-16.165655.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-12-16T23-47-16.165655.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_12_16T23_47_16.165655", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T23-47-16.165655.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T23-47-16.165655.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_12_16T23_47_16.165655", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T23-47-16.165655.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T23-47-16.165655.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_12_16T23_47_16.165655", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T23-47-16.165655.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T23-47-16.165655.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_12_16T23_47_16.165655", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T23-47-16.165655.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T23-47-16.165655.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_12_16T23_47_16.165655", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T23-47-16.165655.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T23-47-16.165655.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_12_16T23_47_16.165655", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T23-47-16.165655.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T23-47-16.165655.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_12_16T23_47_16.165655", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T23-47-16.165655.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T23-47-16.165655.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_12_16T23_47_16.165655", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T23-47-16.165655.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T23-47-16.165655.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_12_16T23_47_16.165655", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T23-47-16.165655.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T23-47-16.165655.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_12_16T23_47_16.165655", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T23-47-16.165655.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T23-47-16.165655.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_12_16T23_47_16.165655", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T23-47-16.165655.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T23-47-16.165655.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_12_16T23_47_16.165655", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T23-47-16.165655.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T23-47-16.165655.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_12_16T23_47_16.165655", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T23-47-16.165655.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T23-47-16.165655.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_12_16T23_47_16.165655", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T23-47-16.165655.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T23-47-16.165655.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_12_16T23_47_16.165655", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-12-16T23-47-16.165655.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-12-16T23-47-16.165655.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_12_16T23_47_16.165655", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T23-47-16.165655.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T23-47-16.165655.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_12_16T23_47_16.165655", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-12-16T23-47-16.165655.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-12-16T23-47-16.165655.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_12_16T23_47_16.165655", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T23-47-16.165655.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T23-47-16.165655.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_12_16T23_47_16.165655", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T23-47-16.165655.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T23-47-16.165655.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_12_16T23_47_16.165655", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T23-47-16.165655.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T23-47-16.165655.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_12_16T23_47_16.165655", "path": ["**/details_harness|hendrycksTest-management|5_2023-12-16T23-47-16.165655.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-12-16T23-47-16.165655.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_12_16T23_47_16.165655", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-12-16T23-47-16.165655.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-12-16T23-47-16.165655.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_12_16T23_47_16.165655", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T23-47-16.165655.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T23-47-16.165655.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_12_16T23_47_16.165655", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T23-47-16.165655.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T23-47-16.165655.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_12_16T23_47_16.165655", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T23-47-16.165655.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T23-47-16.165655.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_12_16T23_47_16.165655", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T23-47-16.165655.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T23-47-16.165655.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_12_16T23_47_16.165655", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-12-16T23-47-16.165655.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-12-16T23-47-16.165655.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_12_16T23_47_16.165655", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-12-16T23-47-16.165655.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-12-16T23-47-16.165655.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_12_16T23_47_16.165655", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-12-16T23-47-16.165655.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-12-16T23-47-16.165655.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_12_16T23_47_16.165655", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T23-47-16.165655.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T23-47-16.165655.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_12_16T23_47_16.165655", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-12-16T23-47-16.165655.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2023-12-16T23-47-16.165655.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_12_16T23_47_16.165655", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T23-47-16.165655.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T23-47-16.165655.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_12_16T23_47_16.165655", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T23-47-16.165655.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T23-47-16.165655.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_12_16T23_47_16.165655", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-12-16T23-47-16.165655.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-12-16T23-47-16.165655.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_12_16T23_47_16.165655", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-12-16T23-47-16.165655.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-12-16T23-47-16.165655.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_12_16T23_47_16.165655", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-12-16T23-47-16.165655.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-12-16T23-47-16.165655.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_12_16T23_47_16.165655", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T23-47-16.165655.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T23-47-16.165655.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_12_16T23_47_16.165655", "path": ["**/details_harness|hendrycksTest-virology|5_2023-12-16T23-47-16.165655.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-12-16T23-47-16.165655.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_12_16T23_47_16.165655", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-12-16T23-47-16.165655.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-12-16T23-47-16.165655.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_12_16T23_47_16.165655", "path": ["**/details_harness|truthfulqa:mc|0_2023-12-16T23-47-16.165655.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-12-16T23-47-16.165655.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_12_16T23_47_16.165655", "path": ["**/details_harness|winogrande|5_2023-12-16T23-47-16.165655.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-12-16T23-47-16.165655.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_12_16T23_47_16.165655", "path": ["results_2023-12-16T23-47-16.165655.parquet"]}, {"split": "latest", "path": 
["results_2023-12-16T23-47-16.165655.parquet"]}]}]}
2023-12-16T23:50:56+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of Undi95/Llamix2-Xwin-MoE-4x13B Dataset automatically created during the evaluation run of model Undi95/Llamix2-Xwin-MoE-4x13B on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-12-16T23:47:16.165655 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
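The code fence that followed "To load the details from a run, you can for instance do the following:" in the original card is dropped in this flattened rendering. A minimal sketch of that call, reconstructed from the repository metadata above (the `harness_winogrande_5` config is only one of the 63 configs listed there), is:

```python
from datasets import load_dataset

# Load the per-example details for one evaluated task; any other config name
# from the metadata (e.g. "harness_gsm8k_5") can be substituted.
data = load_dataset(
    "open-llm-leaderboard/details_Undi95__Llamix2-Xwin-MoE-4x13B",
    "harness_winogrande_5",
    split="train",  # "train" always points to the latest results
)
```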
[ "# Dataset Card for Evaluation run of Undi95/Llamix2-Xwin-MoE-4x13B\n\n\n\nDataset automatically created during the evaluation run of model Undi95/Llamix2-Xwin-MoE-4x13B on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-12-16T23:47:16.165655(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of Undi95/Llamix2-Xwin-MoE-4x13B\n\n\n\nDataset automatically created during the evaluation run of model Undi95/Llamix2-Xwin-MoE-4x13B on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-12-16T23:47:16.165655(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 195, 66, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of Undi95/Llamix2-Xwin-MoE-4x13B\n\n\n\nDataset automatically created during the evaluation run of model Undi95/Llamix2-Xwin-MoE-4x13B on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-12-16T23:47:16.165655(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]## Dataset Card Authors [optional]" ]
43df386d9f526f71137ebb87f98fde7ffa10be46
# Dataset Card for "autotrain-data-zesty" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
irmcon-org/autotrain-data-zesty
[ "region:us" ]
2023-12-17T00:03:13+00:00
{"dataset_info": {"features": [{"name": "instruction", "dtype": "string"}, {"name": "input", "dtype": "string"}, {"name": "output", "dtype": "string"}, {"name": "autotrain_text", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 1685581, "num_examples": 52002}, {"name": "validation", "num_bytes": 1685581, "num_examples": 52002}], "download_size": 947908, "dataset_size": 3371162}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "validation", "path": "data/validation-*"}]}]}
2023-12-17T00:03:14+00:00
[]
[]
TAGS #region-us
# Dataset Card for "autotrain-data-zesty" More Information needed
[ "# Dataset Card for \"autotrain-data-zesty\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"autotrain-data-zesty\"\n\nMore Information needed" ]
[ 6, 18 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"autotrain-data-zesty\"\n\nMore Information needed" ]
d6d3fddb0f68587631d06b0df076399ac689add9
https://muliamoneychanger.co.id/2023/12/12/mendalam-ke-dunia-mengasyikkan-slot-online-sensasi-dan-keseruan-di-ujung-jari-anda%ef%bb%bf/ https://www.m2s.co.id/2023/12/12/mendalam-ke-dunia-mengasyikkan-slot-online-sensasi-dan-keseruan-di-ujung-jari-anda/ http://support.penabulu-stpi.id/2023/12/12/mendalam-ke-dunia-mengasyikkan-slot-online-sensasi-dan-keseruan-di-ujung-jari-anda/ https://aurorasolution.id/index.php/2023/12/12/mendalam-ke-dunia-mengasyikkan-slot-online-sensasi-dan-keseruan-di-ujung-jari-anda/ http://support.penabulu-stpi.id/2023/12/13/panduan-lengkap-cara-bermain-slot-online-dan-meraih-kesuksesan-di-gulungan-digital/ https://muliamoneychanger.co.id/2023/12/13/panduan-lengkap-cara-bermain-slot-online-dan-meraih-kesuksesan-di-gulungan-digital/ https://www.m2s.co.id/2023/12/13/panduan-lengkap-cara-bermain-slot-online-dan-meraih-kesuksesan-di-gulungan-digital/ https://tribratanewsponorogo.id/panduan-lengkap-cara-bermain-slot-online-dan-meraih-kesuksesan-di-gulungan-digital/ https://aurorasolution.id/index.php/2023/12/13/panduan-lengkap-cara-bermain-slot-online-dan-meraih-kesuksesan-di-gulungan-digital/ https://muliamoneychanger.co.id/2023/12/12/menggali-keunikan-permainan-slot-online-dari-teknologi-hingga-tren-terbaru/ https://tribratanewsponorogo.id/menggali-keunikan-permainan-slot-online-dari-teknologi-hingga-tren-terbaru/ https://www.m2s.co.id/2023/12/12/menggali-keunikan-permainan-slot-online-dari-teknologi-hingga-tren-terbaru/ http://support.penabulu-stpi.id/2023/12/12/menggali-keunikan-permainan-slot-online-dari-teknologi-hingga-tren-terbaru/ https://aurorasolution.id/index.php/2023/12/12/menggali-keunikan-permainan-slot-online-dari-teknologi-hingga-tren-terbaru/ https://portafoliosfit.um.edu.mx/davidvalenzuela/2023/12/13/menaklukkan-mesin-slot-online-panduan-langkah-demi-langkah-untuk-pemain-baru/ https://blog.nus.edu.sg/pgslot/2023/12/13/menaklukkan-mesin-slot-online-panduan-langkah-demi-langkah-untuk-pemain-baru/ https://www.amontessori.edu.mx/info/informatica/menaklukkan-mesin-slot-online-panduan-langkah-demi-langkah-untuk-pemain-baru/ https://arkeologisumatera.unja.ac.id/menaklukkan-mesin-slot-online-panduan-langkah-demi-langkah-untuk-pemain-baru/ https://susukan.banjarnegarakab.go.id/?p=2534 https://purwanegara.banjarnegarakab.go.id/menaklukkan-mesin-slot-online-panduan-langkah-demi-langkah-untuk-pemain-baru/ https://www.jasaadwords.web.id/blog/menaklukkan-mesin-slot-online-panduan-langkah-demi-langkah-untuk-pemain-baru/ https://www.mimbarmasjid.customfurniture.co.id/menaklukkan-mesin-slot-online-panduan-langkah-demi-langkah-untuk-pemain-baru/ https://iclassroom.obec.go.th/cop/wangyoawittayayon/2023/12/13/menaklukkan-mesin-slot-online-panduan-langkah-demi-langkah-untuk-pemain-baru/ https://www.homabayassembly.go.ke/menaklukkan-mesin-slot-online-panduan-langkah-demi-langkah-untuk-pemain-baru/ https://blog.byoote.id/index.php/2023/12/13/menaklukkan-mesin-slot-online-panduan-langkah-demi-langkah-untuk-pemain-baru/ http://uks.sragenkab.go.id/?p=15667 http://tecnicmunicipal.apabcn.cat/menaklukkan-mesin-slot-online-panduan-langkah-demi-langkah-untuk-pemain-baru/ https://tahfidz.ptqimamthobari.ponpes.id/blog/menaklukkan-mesin-slot-online-panduan-langkah-demi-langkah-untuk-pemain-baru/ https://infominang.id/2023/12/13/menaklukkan-mesin-slot-online-panduan-langkah-demi-langkah-untuk-pemain-baru/ https://eportfolios.macaulay.cuny.edu/pgslotgame/2023/12/13/menaklukkan-mesin-slot-online-panduan-langkah-demi-langkah-untuk-pemain-baru/ 
http://doc.gold.ac.uk/compartsblog/index.php/2023/12/13/menaklukkan-mesin-slot-online-panduan-langkah-demi-langkah-untuk-pemain-baru/ https://apocalypteriori.my.id/menaklukkan-mesin-slot-online-panduan-langkah-demi-langkah-untuk-pemain-baru/ http://1sd.al-fatah.sch.id/2023/12/13/menaklukkan-mesin-slot-online-panduan-langkah-demi-langkah-untuk-pemain-baru/ https://fiper.co.id/menaklukkan-mesin-slot-online-panduan-langkah-demi-langkah-untuk-pemain-baru/ https://formati.online/mengungkap-misteri-rtp-slot-gacor-menelusuri-peluang-menang-di-dunia-slot-online/ https://www.praxis-1010.at/20945-2/ https://www.professores.uff.br/pmoratori/2023/12/13/mengungkap-misteri-rtp-slot-gacor-menelusuri-peluang-menang-di-dunia-slot-online/ https://web.rmutp.ac.th/mct/thaksaon-k/wordpress/index.php/2023/12/13/mengungkap-misteri-rtp-slot-gacor-menelusuri-peluang-menang-di-dunia-slot-online/ https://sdg.reru.ac.th/uncategorized/1916/ https://beta.acleague.org/memaksimalkan-kemenangan-slot-online-dengan-rtp-yang-tinggi/ https://baovephuongdong.net/memaksimalkan-kemenangan-slot-online-dengan-rtp-yang-tinggi/ https://5br.art4muslim.net/memaksimalkan-kemenangan-slot-online-dengan-rtp-yang-tinggi/ https://traveltours366.com/memaksimalkan-kemenangan-slot-online-dengan-rtp-yang-tinggi/ https://demo.2sell.it/memaksimalkan-kemenangan-slot-online-dengan-rtp-yang-tinggi/ https://77propeller.com/advert/melangkah-ke-dunia-slot-online-gacor-menaklukkan-gulungan-digital-dengan-bijak/ https://martialspace.com/advert/melangkah-ke-dunia-slot-online-gacor-menaklukkan-gulungan-digital-dengan-bijak/ https://icimodels.com/advert/melangkah-ke-dunia-slot-online-gacor-menaklukkan-gulungan-digital-dengan-bijak/ https://ghiassets.com/advert/melangkah-ke-dunia-slot-online-gacor-menaklukkan-gulungan-digital-dengan-bijak/ https://nsedrm.org.np/blog/melangkah-ke-dunia-slot-online-gacor-menaklukkan-gulungan-digital-dengan-bijak/ https://thegainline.co.nz/melangkah-ke-dunia-slot-online-gacor-menaklukkan-gulungan-digital-dengan-bijak/ https://formati.online/melangkah-ke-dunia-slot-online-gacor-menaklukkan-gulungan-digital-dengan-bijak/ https://www.wcdefa.org/advert/melangkah-ke-dunia-slot-online-gacor-menaklukkan-gulungan-digital-dengan-bijak/ https://www.doortoindustry.com/melangkah-ke-dunia-slot-online-gacor-menaklukkan-gulungan-digital-dengan-bijak/ https://crazyridersnrockguitars.altervista.org/advert/melangkah-ke-dunia-slot-online-gacor-menaklukkan-gulungan-digital-dengan-bijak/?doing_wp_cron=1702475477.3492441177368164062500 https://app.socie.com.br/read-blog/116203 https://ai.ceo/read-blog/101702 https://www.palscity.com/read-blog/252049 https://linkspreed.com/read-blog/69637 https://pinkandblueparenting.com/advert/melangkah-ke-dunia-slot-online-gacor-menaklukkan-gulungan-digital-dengan-bijak/ https://blog.byoote.id/index.php/2023/12/14/panduan-pemula-untuk-memilih-agen-judi-slot-online-gacor/ https://arkeologisumatera.unja.ac.id/panduan-pemula-untuk-memilih-agen-judi-slot-online-gacor/ https://www.homabayassembly.go.ke/panduan-pemula-untuk-memilih-agen-judi-slot-online-gacor/ https://portafoliosfit.um.edu.mx/davidvalenzuela/2023/12/14/panduan-pemula-untuk-memilih-agen-judi-slot-online-gacor/ https://blog.nus.edu.sg/pgslot/2023/12/14/panduan-pemula-untuk-memilih-agen-judi-slot-online-gacor/ https://www.amontessori.edu.mx/info/informatica/panduan-pemula-untuk-memilih-agen-judi-slot-online-gacor/ https://www.jasaadwords.web.id/blog/panduan-pemula-untuk-memilih-agen-judi-slot-online-gacor/ 
https://iclassroom.obec.go.th/cop/wangyoawittayayon/2023/12/14/panduan-pemula-untuk-memilih-agen-judi-slot-online-gacor/ https://susukan.banjarnegarakab.go.id/?p=2546 https://www.mimbarmasjid.customfurniture.co.id/panduan-pemula-untuk-memilih-agen-judi-slot-online-gacor/ https://tahfidz.ptqimamthobari.ponpes.id/blog/agen-judi-slot-online-gacor-tips-memilih-website-yang-tepat/ http://uks.sragenkab.go.id/?p=15685 http://doc.gold.ac.uk/compartsblog/index.php/2023/12/14/agen-judi-slot-online-gacor-tips-memilih-website-yang-tepat/ https://eportfolios.macaulay.cuny.edu/pgslotgame/2023/12/14/agen-judi-slot-online-gacor-tips-memilih-website-yang-tepat/ https://apocalypteriori.my.id/agen-judi-slot-online-gacor-tips-memilih-website-yang-tepat/ http://1sd.al-fatah.sch.id/2023/12/14/agen-judi-slot-online-gacor-tips-memilih-website-yang-tepat/ https://fiper.co.id/agen-judi-slot-online-gacor-tips-memilih-website-yang-tepat/ https://infominang.id/2023/12/14/agen-judi-slot-online-gacor-tips-memilih-website-yang-tepat/ http://tecnicmunicipal.apabcn.cat/agen-judi-slot-online-gacor-tips-memilih-website-yang-tepat/ https://hackmd.io/s/B19BUZ_UT https://blog.byoote.id/index.php/2023/12/14/tips-memilih-agen-judi-slot-online-gacor-yang-terbaik/ https://arkeologisumatera.unja.ac.id/tips-memilih-agen-judi-slot-online-gacor-yang-terbaik/ https://www.homabayassembly.go.ke/tips-memilih-agen-judi-slot-online-gacor-yang-terbaik/ https://portafoliosfit.um.edu.mx/davidvalenzuela/2023/12/14/tips-memilih-agen-judi-slot-online-gacor-yang-terbaik/ https://blog.nus.edu.sg/pgslot/2023/12/14/tips-memilih-agen-judi-slot-online-gacor-yang-terbaik/ https://www.amontessori.edu.mx/info/informatica/tips-memilih-agen-judi-slot-online-gacor-yang-terbaik%ef%bb%bf/ https://www.jasaadwords.web.id/blog/tips-memilih-agen-judi-slot-online-gacor-yang-terbaik/ https://iclassroom.obec.go.th/cop/wangyoawittayayon/2023/12/14/tips-memilih-agen-judi-slot-online-gacor-yang-terbaik/ https://susukan.banjarnegarakab.go.id/?p=2548 http://uks.sragenkab.go.id/?p=15699 http://doc.gold.ac.uk/compartsblog/index.php/2023/12/14/tips-memilih-agen-judi-slot-online-gacor-yang-terbaik/ https://eportfolios.macaulay.cuny.edu/pgslotgame/2023/12/14/tips-memilih-agen-judi-slot-online-gacor-yang-terbaik/ https://apocalypteriori.my.id/tips-memilih-agen-judi-slot-online-gacor-yang-terbaik/ http://1sd.al-fatah.sch.id/2023/12/14/tips-memilih-agen-judi-slot-online-gacor-yang-terbaik/ https://fiper.co.id/tips-memilih-agen-judi-slot-online-gacor-yang-terbaik/ https://tahfidz.ptqimamthobari.ponpes.id/blog/tips-memilih-agen-judi-slot-online-gacor-yang-terbaik/ https://www.mimbarmasjid.customfurniture.co.id/tips-memilih-agen-judi-slot-online-gacor-yang-terbaik/ http://tecnicmunicipal.apabcn.cat/tips-memilih-agen-judi-slot-online-gacor-yang-terbaik/ https://hackmd.io/@fb9topnohu/By5df7OIa https://t.me/s/lucky77slot https://cbexapp.noaa.gov/tag/index.php?tc=1&tag=slot%20online https://hackmd.io/@fb9topnohu/rJg99bu8p https://www.praxis-1010.at/berlayar-di-dunia-slot-online-panduan-memilih-agen-judi-slot-gacor-yang-terpercaya/ https://www.professores.uff.br/pmoratori/2023/12/14/berlayar-di-dunia-slot-online-panduan-memilih-agen-judi-slot-gacor-yang-terpercaya/ https://formati.online/berlayar-di-dunia-slot-online-panduan-memilih-agen-judi-slot-gacor-yang-terpercaya/ https://satudata.landakkab.go.id/uploads/user/2023-12-14-072034.937742Lucky77slot.html https://opendata.brebeskab.go.id/uploads/user/2023-12-14-072150.682743Lucky77slot.html 
https://prokopim.sumbawakab.go.id/assets/konten/files/Lucky77slot.shtml https://umaberingin.desa.id/assets/konten/files/Lucky77slot.shtml https://data.kepahiangkab.go.id/uploads/user/2023-12-14-074306.845329Lucky77slot.html https://whiteseo.hashnode.dev/memahami-keunikan-situs-slot-online-gacor-2024 https://xiglute.com/blogs/20244994/195631/memahami-keunikan-situs-slot-online-gacor-2024 https://bresdel.com/blogs/397000/Memahami-Keunikan-Situs-Slot-Online-Gacor-2024 https://usa.life/read-blog/48103 https://journals.eco-vector.com/RCF/comment/view/734/7393/71035 https://orlandomais.com/advert/memahami-keunikan-situs-slot-online-gacor-2024/ https://www.udjama.org/advert/memahami-keunikan-situs-slot-online-gacor-2024/ https://www.montessori.club/advert/memahami-keunikan-situs-slot-online-gacor-2024/ https://koreanlifenews.com/advert/memahami-keunikan-situs-slot-online-gacor-2024/ https://setbooks.co.zw/advert/memahami-keunikan-situs-slot-online-gacor-2024/ https://plume.pullopen.xyz/~/blog/Menjelajahi%20Keindahan%20Slot%20Online%20Gacor%20di%20Tahun%202024 https://cbexapp.noaa.gov/tag/index.php?tc=1&tag=slot%20gacor%20hari%20ini https://www.cucinamancina.com/index.php?option=com_community&view=profile&userid=30713 https://agenciaisladepascua.com/2023/12/14/menjelajahi-keindahan-slot-online-gacor-di-tahun-2024/ https://carlamelfe.com/uncategorized/menjelajahi-keindahan-slot-online-gacor-di-tahun-2024/ https://ucgp.jujuy.edu.ar/profile/ambkingpgsoftgmailcom/ https://www.livingcolorsalon.com/forum/general-discussions/menjelajahi-keindahan-slot-online-gacor-di-tahun-2024 https://www.funddreamer.com/users/mdm-ahref8 https://heylink.me/mdm71/ http://igamepublisher.com/menjelajahi-keindahan-slot-online-gacor-di-tahun-2024/ http://www.ctump.edu.vn/Default.aspx?tabid=115&userId=16842 https://socialblast.club/read-blog/24307 https://startuppoint.copiny.com/question/details/id/790496 https://gotartwork.com/Blog/menjelajahi-slot-online-gacor-dengan-keistimewaan-tertinggi/244726/ https://www.hoggit.com/Object/26687/menjelajahi-slot-online-gacor-dengan-keistimewaan-tertinggi-menjelajahi-slot-online-gacor-dengan-kei https://theinfluencerz.com/menjelajahi-slot-online-gacor-dengan-keistimewaan-tertinggi/ https://www.micromentor.org/question/16932 https://shop.sector.business/2023/12/14/menjelajahi-slot-online-gacor-dengan-keistimewaan-tertinggi/ https://ambking.wodemo.net/entry/543747 https://tapchivatuyentap.tlu.edu.vn/Activity-Feed/My-Profile/UserId/9669 https://indiefilmstube.com/menjelajahi-slot-online-gacor-dengan-keistimewaan-tertinggi https://ambking999-pg.company.site/ https://indiefilmstube.com/menjelajahi-slot-online-gacor-dengan-keistimewaan-tertinggi https://www.giffa.ru/who/lucky77-merenungi-pesona-slot-online-gacor-yang-luar-biasa/ https://nitaviore.federatedjournals.com/lucky77-merenungi-pesona-slot-online-gacor-yang-luar-biasa/ https://studygoodenglish.com/course/blog/index.php?entryid=6821 https://www.lawyersclubindia.com/profile.asp?member_id=935499 https://www.musictraveler.com/en/users/noorhikmah/ https://www.wefifo.com/user/profile/424157957490300 https://blocs.xtec.cat/pgslotgame/2023/12/15/menjelajahi-slot-online-gacor-dengan-keistimewaan-tertinggi/ https://www.jqwidgets.com/community/users/pgslotjoko98/ https://starcourts.com/author/lucky77slotgacor-51520/ https://www.coupondetect.com/author/pgslot9g9322/ https://heavenarticle.com/author/lucky77slotamanah-24449/ https://gamereleasetoday.com/events/fenomena-judi-slot-online-antara-hiburan-dan-tantangan-kesejahteraan/ 
https://developmentmi.com/author/noorhikmah2-31717/ https://situs-slot-online-gacor-2024.webflow.io/ http://crcdourados.com.br/blog/index.php?entryid=7991 https://gujaratiuk.com/author/adminpgslot/ https://www.blog.sbs.com.br/forum/sbs-educacao/pg-sl-tewbtrngaetkngaaykhuue-aair https://notionpress.com/author/918800 https://vocal.media/authors/admin-r6va026n https://komunitas.jaklingkoindonesia.co.id/blogs/10575/Bandar-Judi-Slot-Online-Resmi-dan-Implikasinya https://vendors.mikolo.com/forums/discussion/planning/lucky-slot-gacor-terbaik-terpercaya-resmi https://www.beatstars.com/fb9thethao http://www.girasoleconsulenzaeformazione.it/fad/claroline/phpbb/viewtopic.php?topic=27&cidReset=true&cidReq=FB9 https://xiglute.com/blogs/20244994/195685/agen-judi-slot-online-resmi-dijamin-gacor-dan-gampang-maxwinnya https://agoracom.com/ir/edigital/forums/off-topic/topics/797034-agen-judi-slot-online-resmi-dijamin-gacor-dan-gampang-maxwinnya/messages/2401596#message http://ank.ssk.in.th/index.php?name=webboard&file=read&id=10739 https://www.quia.com/profiles/fb9252 https://innove.org/campus/app/upload/users/5/5419/Lucky77slot.html http://darelbachra.com/app/webroot/js/kcfinder/upload/files/Lucky77slot.shtml https://greffe-cheveux-tunisie.fr/ckeditor/kcfinder/upload/files/Lucky77slot.shtml https://alzinda.fr/ckeditor/kcfinder/upload/files/Lucky77slot.shtml http://triomil.cz/kcfinder/upload/files/Lucky77slot.shtml http://ieciudaddeasis.edu.co/aula/main/upload/users/2/2362/my_files/Lucky77slot.html https://ckan-shimane.dataeye.jp/uploads/user/2023-12-14-192101.700147Lucky77slot.html https://catalog2.gbdi.cloud/uploads/user/2023-12-14-192642.006419Lucky77slot.html https://datasets.fieldsofview.in/uploads/user/2023-12-15-033635.982236Lucky77slot.html https://mecal.vn/public/assets/ckeditor/kcfinder/upload/files/slot/Lucky77slot.shtml https://www.kermaz.fr/js/kcfinder/upload/files/Lucky77slot.shtml https://kimhanh.com.vn/kcfinder/upload/files/Lucky77slot.shtml https://youme-project.eu/platform/app/upload/users/6/6912/my_files/Lucky77slot.html https://stba-pertiwi.ac.id/slotonline/ https://man2polewalimandar.sch.id/slot-online/ https://lppm.iaiddipolewalimandar.ac.id/slot-online/ https://data.binhduong.gov.vn/uploads/user/2023-12-15-064651.728883Lucky77slot.html http://publication.lecames.org/files/journals/9/articles/29838/submission/original/29838-204231-1-SM.html http://www.ecoforumjournal.ro/files/journals/1/articles/1971/submission/original/1971-5303-1-SM.html https://vendors.mikolo.com/forums/discussion/mikolo-community/berlayar-di-dunia-slot-online https://arkeologisumatera.unja.ac.id/menguak-dunia-agen-slot-online/ https://plume.pullopen.xyz/~/blog/Lucky77slotonline https://fiper.co.id/menguak-situs-agen-slot-online-paling-gacor/ http://doc.gold.ac.uk/compartsblog/index.php/2023/12/15/menguak-situs-agen-slot-online-paling-gacor/ https://muliamoneychanger.co.id/2023/12/15/menguak-situs-agen-slot-online-paling-gacor/ https://caravanastore.com.br/eksplorasi-dunia-agen-slot-online-petualangan-taruhan-daring-yang-menarik/ https://shortletexpress.com/2023/12/15/eksplorasi-dunia-agen-slot-online/ https://dancekitchen.ru/2023/12/15/eksplorasi-dunia-agen-slot-online-petualangan-taruhan-daring-yang-menarik/ https://tm-viewing.ru/2023/12/15/eksplorasi-dunia-agen-slot-online-petualangan-taruhan-daring-yang-menarik/ https://www.hoggit.com/Object/26702/situs-slot-online-gacor-agen-situs-slot-online-gacor-telah-mengubah-cara-orang-berjudi-membawa-penga https://lanoticia.hn/advert/situs-slot-online-gacor/ 
https://celtindependent.com/advert/situs-slot-online-gacor/ https://rollandskate.be/advert/situs-slot-online-gacor/ https://marilwyd.co.uk/situs-slot-online-gacor/ https://pawlukycia.com.ar/situs-slot-online-gacor/ https://assessment.alan.co.id/lucky77-petualangan-bermain-game-slot-online-gacor-yang-mengasyikkan/ https://www.zemnipracejedlicka.cz/lucky77-petualangan-bermain-game-slot-online-gacor-yang-mengasyikkan/ https://ravenwellnesstraininginstitute.com/services/lucky77-petualangan-bermain-game-slot-online-gacor-yang-mengasyikkan/ https://www.iberoamericano.edu.ec/lucky77-petualangan-bermain-game-slot-online-gacor-yang-mengasyikkan/ https://techtrustbd.com/lucky77-petualangan-bermain-game-slot-online-gacor-yang-mengasyikkan/ https://www.aznews.tv/lucky77-petualangan-bermain-game-slot-online-gacor-yang-mengasyikkan/ https://shop.stemuae.com/lucky77-petualangan-bermain-game-slot-online-gacor-yang-mengasyikkan/ https://castingtalentworld.com/lucky77-petualangan-bermain-game-slot-online-gacor-yang-mengasyikkan/ https://britanniamajorettes.co.uk/lucky77-petualangan-bermain-game-slot-online-gacor-yang-mengasyikkan/ https://kocaaga.com.tr/lucky77-petualangan-bermain-game-slot-online-gacor-yang-mengasyikkan/ https://yuki-anime.com/lucky77-petualangan-bermain-game-slot-online-gacor-yang-mengasyikkan/ https://www.digitalshivamsharma.com/lucky77-petualangan-bermain-game-slot-online-gacor-yang-mengasyikkan/ https://lowermerionihc.com/lucky77-petualangan-bermain-game-slot-online-gacor-yang-mengasyikkan/ https://foodgame.ie/lucky77-petualangan-bermain-game-slot-online-gacor-yang-mengasyikkan/ https://xaydungbuigia.com/lucky77-petualangan-bermain-game-slot-online-gacor-yang-mengasyikkan/ https://acephysio.org/lucky77-petualangan-bermain-game-slot-online-gacor-yang-mengasyikkan/ https://www.iyres.gov.my/index.php?option=com_booklibrary&task=view_bl&catid=53&id=66&Itemid=297 https://www.blogger.com/profile/09535648797865568537 https://www.psyrehab.ca/forums/thread/ba243d4 http://dev.fderecho.net/aulavirtual/claroline/phpbb/viewtopic.php?topic=6&cidReset=true&cidReq=FB9 http://entsaintetienne.free.fr/claroline1110/claroline/phpbb/viewtopic.php?topic=2&cidReset=true&cidReq=ADAPOKER303 https://viva99-slot-login.mybranchbob.com/game-slot-online-gacor https://www.evtv.me/author/pgsoftzaelani/ https://poeditor.com/join/project/8xGpR9NZyl https://warofdragons.com/forum/index.php?page=UserGuestbook&userID=76998&entryID=571#entry571 https://www.zemnipracejedlicka.cz/memburu-jackpot-keasyikan-dan-strategi-di-balik-game-slot-online/ https://ravenwellnesstraininginstitute.com/services/memburu-jackpot-keasyikan-dan-strategi-di-balik-game-slot-online/ https://www.aznews.tv/memburu-jackpot-keasyikan-dan-strategi-di-balik-game-slot-online/ https://shop.stemuae.com/memburu-jackpot-keasyikan-dan-strategi-di-balik-game-slot-online/ https://yuki-anime.com/memburu-jackpot-keasyikan-dan-strategi-di-balik-game-slot-online/ https://www.digitalshivamsharma.com/memburu-jackpot-keasyikan-dan-strategi-di-balik-game-slot-online/ https://lowermerionihc.com/21208-2/ https://techtrustbd.com/memburu-jackpot-keasyikan-dan-strategi-di-balik-game-slot-online/ https://castingtalentworld.com/memburu-jackpot-keasyikan-dan-strategi-di-balik-game-slot-online/ https://britanniamajorettes.co.uk/memburu-jackpot-keasyikan-dan-strategi-di-balik-game-slot-online/ https://kocaaga.com.tr/lucky77-maxwin-keasyikan-dan-strategi-di-balik-game-slot-online/ https://foodgame.ie/lucky77-maxwin-keasyikan-dan-strategi-di-balik-game-slot-online/ 
https://xaydungbuigia.com/memburu-jackpot-keasyikan-dan-strategi-di-balik-game-slot-online/ https://www.psyrehab.ca/forums/thread/bc291f4 http://dev.fderecho.net/aulavirtual/claroline/phpbb/viewtopic.php?topic=7&cidReset=true&cidReq=FB9 http://entsaintetienne.free.fr/claroline1110/claroline/phpbb/viewtopic.php?topic=3&cidReset=true&cidReq=ADAPOKER303 https://viva99-slot-login.mybranchbob.com/memburu-jackpot-keasyikan-dan-strategi-di-balik-game-slot-online  https://plume.pullopen.xyz/~/blog/Melangkah%20ke%20Dunia%20Taruhan%20Modern:%20Slot%20Online%20dan%20Sensasi%20Menang%20Besar https://warofdragons.com/forum/index.php?page=UserGuestbook&userID=76998&entryID=572#entry572 https://www.iyres.gov.my/index.php?option=com_booklibrary&task=view_bl&catid=53&id=66&Itemid=297 https://www.hoggit.com/Object/26725/melangkah-ke-dunia-taruhan-modern-slot-online-dan-sensasi-menang-besar-dalam-era-perjudian-daring-ya https://french24.ir/forum/profile/noorhikmah/ https://elearning.fastikom-unsiq.ac.id/claroline/phpbb/viewtopic.php?topic=1800&cidReset=true&cidReq=BUSINESS111 https://whizolosophy.com/category/politics-governance/article-essay/melangkah-ke-dunia-taruhan-modern-slot-online-dan-sensasi-menang-besar https://troickoe22.ru/advert/slot-online-dan-sensasi-menang-besar/ https://plaza.rakuten.co.jp/fb9casinotruc/diary/202310180000/ https://congmuaban.vn/page/asinotr454770/thong-tin-thanh-toan/ https://marilwyd.co.uk/melangkah-ke-dunia-taruhan-modern-slot-online-dan-sensasi-menang-besar/ https://www.prakse.lv/article/2027/melangkah-ke-dunia-taruhan-modern-slot-online-dan-sensasi-menang-besar https://bulkwp.com/support-forums/users/fb9gamingx/ https://www.ourboox.com/books/melangkah-ke-dunia-taruhan-modern-slot-online-dan-sensasi-menang-besar/ https://travefy.com/discover/slotonline https://slotonline.kktix.cc/
slotonlinegacor/backlink
[ "region:us" ]
2023-12-17T00:13:53+00:00
{}
2023-12-17T00:14:28+00:00
[]
[]
TAGS #region-us
URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL https://t.me/s/lucky77slot URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL.business/2023/12/14/menjelajahi-slot-online-gacor-dengan-keistimewaan-tertinggi/ URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL  URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL
[]
[ "TAGS\n#region-us \n" ]
[ 6 ]
[ "passage: TAGS\n#region-us \n" ]
aeb758f6f7a7c7c2b2eb49cde6f91e2fd3a586a3
# ObjectNet (720P Shortest Edge) A webp (lossless) encoded version of [ObjectNet-1.0](https://objectnet.dev/index.html) resized to shortest edge = 720 pixels. ## License / Usage Terms ObjectNet is free to use for both research and commercial applications. The authors own the source images and allow their use under a license derived from Creative Commons Attribution 4.0 with only two additional clauses. 1. **ObjectNet may never be used to tune the parameters of any model.** 2. **Any individual images from ObjectNet may only be posted to the web including their 1 pixel red border**. If you are using ObjectNet, please cite our work, the citation appears at the bottom of this page. Any derivative of ObjectNet must contain attribution as well. ## About What is ObjectNet? * A new kind of vision dataset borrowing the idea of controls from other areas of science. * No training set, only a test set! Put your vision system through its paces. * Collected to intentionally show objects from new viewpoints on new backgrounds. * 50,000 image test set, same as ImageNet, with controls for rotation, background, and viewpoint. * 313 object classes with 113 overlapping ImageNet * Large performance drop, what you can expect from vision systems in the real world! * Robust to fine-tuning and a very difficult transfer learning problem ## Why the Red Borders / How do I recognize if an image is in ObjectNet? As training sets become huge, the risk that test and training sets overlap is serious. We provide ObjectNet with a 2 pixel red border around each image which must be removed before performing inference. The ObjectNet license requires that if you post images from ObjectNet to the web, you include this border. Any time you see an image with a solid 2 pixel red border, that's an indication it's in someone's test set and you should be careful about training on it. Reverse image search will allow you to figure out which test set it is from. NOTE: original ObjectNet PNG files actually have a 2 pixel red border while their descriptions say 1. ## Preprocessing Steps for This timm Version 1. Remove 2-pixel red border. 2. Resize images to shortest edge = 720 pixels (if shortest edge larger than this). 3. Add back 1-pixel red border. 4. Re-encode PNG images with lossless WebP. In total, ~76% of original size. 5. Add `imagenet_labels` and `imagenet_synsets` consisting of lists of ImageNet-1k classes that overlap with ObjectNet class. ## Citation ```bibtex @incollection{NIPS2019_9142, title = {ObjectNet: A large-scale bias-controlled dataset for pushing the limits of object recognition models}, author = {Barbu, Andrei and Mayo, David and Alverio, Julian and Luo, William and Wang, Christopher and Gutfreund, Dan and Tenenbaum, Josh and Katz, Boris}, booktitle = {Advances in Neural Information Processing Systems 32}, editor = {H. Wallach and H. Larochelle and A. Beygelzimer and F. d\textquotesingle Alch\'{e}-Buc and E. Fox and R. Garnett}, pages = {9448--9458}, year = {2019}, publisher = {Curran Associates, Inc.}, url = {http://papers.nips.cc/paper/9142-objectnet-a-large-scale-bias-controlled-dataset-for-pushing-the-limits-of-object-recognition-models.pdf} } ```
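Below is a minimal loading sketch, added for illustration and not part of the original card. It assumes the Hugging Face `datasets` library, the repo id `timm/objectnet-720p`, and the `image` feature listed in this record's metadata; it strips the 1-pixel red border before inference, as the card above instructs.

```python
# Minimal sketch (illustrative, not part of the original card): load the test
# split and strip the 1-pixel red border before inference. Assumes the
# Hugging Face `datasets` library and the `image` feature of this repo.
from datasets import load_dataset

ds = load_dataset("timm/objectnet-720p", split="test", streaming=True)

def strip_red_border(pil_image, border=1):
    # Crop `border` pixels from every side; this re-encoded version keeps a
    # 1-pixel red border (the original PNGs used 2 pixels), which must be
    # removed before running a model on the image.
    width, height = pil_image.size
    return pil_image.crop((border, border, width - border, height - border))

for example in ds:
    image = strip_red_border(example["image"])
    # Run inference on `image` here; ObjectNet may never be used to tune a model.
    break
```

Streaming is used here only so a quick check does not require downloading the full ~45 GB test split; a regular (non-streaming) `load_dataset` call works the same way.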
timm/objectnet-720p
[ "region:us" ]
2023-12-17T00:57:57+00:00
{"dataset_info": {"features": [{"name": "image", "dtype": "image"}, {"name": "label", "dtype": {"class_label": {"names": {"0": "air_freshener", "1": "alarm_clock", "2": "backpack", "3": "baking_sheet", "4": "banana", "5": "band_aid", "6": "baseball_bat", "7": "baseball_glove", "8": "basket", "9": "bathrobe", "10": "battery", "11": "bed_sheet", "12": "beer_bottle", "13": "beer_can", "14": "belt", "15": "bench", "16": "bicycle", "17": "bike_pump", "18": "bills_money", "19": "binder_closed", "20": "biscuits", "21": "blanket", "22": "blender", "23": "blouse", "24": "board_game", "25": "book_closed", "26": "bookend", "27": "boots", "28": "bottle_cap", "29": "bottle_opener", "30": "bottle_stopper", "31": "box", "32": "bracelet", "33": "bread_knife", "34": "bread_loaf", "35": "briefcase", "36": "brooch", "37": "broom", "38": "bucket", "39": "butchers_knife", "40": "butter", "41": "button", "42": "calendar", "43": "can_opener", "44": "candle", "45": "canned_food", "46": "cd_case", "47": "cellphone", "48": "cellphone_case", "49": "cellphone_charger", "50": "cereal", "51": "chair", "52": "cheese", "53": "chess_piece", "54": "chocolate", "55": "chopstick", "56": "clothes_hamper", "57": "clothes_hanger", "58": "coaster", "59": "coffee_beans", "60": "coffee_french_press", "61": "coffee_grinder", "62": "coffee_machine", "63": "coffee_table", "64": "coin_money", "65": "comb", "66": "combination_lock", "67": "computer_mouse", "68": "contact_lens_case", "69": "cooking_oil_bottle", "70": "cork", "71": "cutting_board", "72": "deodorant", "73": "desk_lamp", "74": "detergent", "75": "dish_soap", "76": "document_folder_closed", "77": "dog_bed", "78": "doormat", "79": "drawer_open", "80": "dress", "81": "dress_pants", "82": "dress_shirt", "83": "dress_shoe_men", "84": "dress_shoe_women", "85": "drill", "86": "drinking_cup", "87": "drinking_straw", "88": "drying_rack_for_clothes", "89": "drying_rack_for_dishes", "90": "dust_pan", "91": "dvd_player", "92": "earbuds", "93": "earring", "94": "egg", "95": "egg_carton", "96": "envelope", "97": "eraser_white_board", "98": "extension_cable", "99": "eyeglasses", "100": "fan", "101": "figurine_or_statue", "102": "first_aid_kit", "103": "flashlight", "104": "floss_container", "105": "flour_container", "106": "fork", "107": "frying_pan", "108": "full_sized_towel", "109": "glue_container", "110": "hair_brush", "111": "hair_dryer", "112": "hairclip", "113": "hairtie", "114": "hammer", "115": "hand_mirror", "116": "hand_towel_or_rag", "117": "handbag", "118": "hat", "119": "headphones_over_ear", "120": "helmet", "121": "honey_container", "122": "ice", "123": "ice_cube_tray", "124": "iron_for_clothes", "125": "ironing_board", "126": "jam", "127": "jar", "128": "jeans", "129": "kettle", "130": "key_chain", "131": "keyboard", "132": "ladle", "133": "lampshade", "134": "laptop_charger", "135": "laptop_open", "136": "leaf", "137": "leggings", "138": "lemon", "139": "letter_opener", "140": "lettuce", "141": "light_bulb", "142": "lighter", "143": "lipstick", "144": "loofah", "145": "magazine", "146": "makeup", "147": "makeup_brush", "148": "marker", "149": "match", "150": "measuring_cup", "151": "microwave", "152": "milk", "153": "mixing_salad_bowl", "154": "monitor", "155": "mouse_pad", "156": "mouthwash", "157": "mug", "158": "multitool", "159": "nail_clippers", "160": "nail_fastener", "161": "nail_file", "162": "nail_polish", "163": "napkin", "164": "necklace", "165": "newspaper", "166": "night_light", "167": "nightstand", "168": "notebook", "169": "notepad", "170": 
"nut_for_screw", "171": "orange", "172": "oven_mitts", "173": "padlock", "174": "paint_can", "175": "paintbrush", "176": "paper", "177": "paper_bag", "178": "paper_plates", "179": "paper_towel", "180": "paperclip", "181": "peeler", "182": "pen", "183": "pencil", "184": "pepper_shaker", "185": "pet_food_container", "186": "phone_landline", "187": "photograph_printed", "188": "pill_bottle", "189": "pill_organizer", "190": "pillow", "191": "pitcher", "192": "placemat", "193": "plastic_bag", "194": "plastic_cup", "195": "plastic_wrap", "196": "plate", "197": "playing_cards", "198": "pliers", "199": "plunger", "200": "pop_can", "201": "portable_heater", "202": "poster", "203": "power_bar", "204": "power_cable", "205": "printer", "206": "raincoat", "207": "rake", "208": "razor", "209": "receipt", "210": "remote_control", "211": "removable_blade", "212": "ribbon", "213": "ring", "214": "rock", "215": "rolling_pin", "216": "ruler", "217": "running_shoe", "218": "safety_pin", "219": "salt_shaker", "220": "sandal", "221": "scarf", "222": "scissors", "223": "screw", "224": "scrub_brush", "225": "sewing_kit", "226": "shampoo_bottle", "227": "shoelace", "228": "shorts", "229": "shovel", "230": "skateboard", "231": "skirt", "232": "sleeping_bag", "233": "slipper", "234": "soap_bar", "235": "soap_dispenser", "236": "sock", "237": "soup_bowl", "238": "spatula", "239": "speaker", "240": "sponge", "241": "spoon", "242": "spray_bottle", "243": "squeegee", "244": "squeeze_bottle", "245": "standing_lamp", "246": "stapler", "247": "step_stool", "248": "still_camera", "249": "stopper_sink_tub", "250": "strainer", "251": "stuffed_animal", "252": "sugar_container", "253": "suit_jacket", "254": "suitcase", "255": "sunglasses", "256": "sweater", "257": "swimming_trunks", "258": "t-shirt", "259": "table_knife", "260": "tablecloth", "261": "tablet_ipad", "262": "tanktop", "263": "tape", "264": "tape_measure", "265": "tarp", "266": "teabag", "267": "teapot", "268": "tennis_racket", "269": "thermometer", "270": "thermos", "271": "throw_pillow", "272": "tie", "273": "tissue", "274": "toaster", "275": "toilet_paper_roll", "276": "tomato", "277": "tongs", "278": "toothbrush", "279": "toothpaste", "280": "tote_bag", "281": "toy", "282": "trash_bag", "283": "trash_bin", "284": "travel_case", "285": "tray", "286": "trophy", "287": "tv", "288": "tweezers", "289": "umbrella", "290": "usb_cable", "291": "usb_flash_drive", "292": "vacuum_cleaner", "293": "vase", "294": "video_camera", "295": "walker", "296": "walking_cane", "297": "wallet", "298": "watch", "299": "water_bottle", "300": "water_filter", "301": "webcam", "302": "weight_exercise", "303": "weight_scale", "304": "wheel", "305": "whisk", "306": "whistle", "307": "wine_bottle", "308": "wine_glass", "309": "winter_glove", "310": "wok", "311": "wrench", "312": "ziploc_bag"}}}}, {"name": "imagenet_labels", "sequence": "int64"}, {"name": "imagenet_synsets", "sequence": "string"}, {"name": "image_id", "dtype": "string"}], "splits": [{"name": "test", "num_bytes": 45761885552.673, "num_examples": 50273}], "download_size": 45600921853, "dataset_size": 45761885552.673}, "configs": [{"config_name": "default", "data_files": [{"split": "test", "path": "data/test-*"}]}], "extra_gated_prompt": "By clicking on \u201cAccess repository\u201d below, you also agree to ObjectNet Terms: ObjectNet is free to use for both research and commercial applications. 
The authors own the source images and allow their use under a license derived from Creative Commons Attribution 4.0 with only two additional clauses.\n1. ObjectNet may never be used to tune the parameters of any model.\n2. Any individual images from ObjectNet may only be posted to the web including their 1 pixel red border.\nIf you are using ObjectNet, please cite our work, the citation appears at the bottom of this page. Any derivative of ObjectNet must contain attribution as well."}
2023-12-17T04:10:01+00:00
[]
[]
TAGS #region-us
# ObjectNet (720P Shortest Edge) A webp (lossless) encoded version of ObjectNet-1.0 resized to shortest edge = 720 pixels. ## License / Usage Terms ObjectNet is free to use for both research and commercial applications. The authors own the source images and allow their use under a license derived from Creative Commons Attribution 4.0 with only two additional clauses. 1. ObjectNet may never be used to tune the parameters of any model. 2. Any individual images from ObjectNet may only be posted to the web including their 1 pixel red border. If you are using ObjectNet, please cite our work, the citation appears at the bottom of this page. Any derivative of ObjectNet must contain attribution as well. ## About What is ObjectNet? * A new kind of vision dataset borrowing the idea of controls from other areas of science. * No training set, only a test set! Put your vision system through its paces. * Collected to intentionally show objects from new viewpoints on new backgrounds. * 50,000 image test set, same as ImageNet, with controls for rotation, background, and viewpoint. * 313 object classes with 113 overlapping ImageNet * Large performance drop, what you can expect from vision systems in the real world! * Robust to fine-tuning and a very difficult transfer learning problem ## Why the Red Borders / How do I recognize if an image is in ObjectNet? As training sets become huge, the risk that test and training sets overlap is serious. We provide ObjectNet with a 2 pixel red border around each image which must be removed before performing inference. The ObjectNet license requires that if you post images from ObjectNet to the web, you include this border. Any time you see an image with a solid 2 pixel red border, that's an indication it's in someone's test set and you should be careful about training on it. Reverse image search will allow you to figure out which test set it is from. NOTE: original ObjectNet PNG files actually have a 2 pixel red border while their descriptions say 1. ## Preprocessing Steps for This timm Version 1. Remove 2-pixel red border. 2. Resize images to shortest edge = 720 pixels (if shortest edge larger than this). 3. Add back 1-pixel red border. 4. Re-encode PNG images with lossless WebP. In total, ~76% of original size. 5. Add 'imagenet_labels' and 'imagenet_synsets' consisting of lists of ImageNet-1k classes that overlap with ObjectNet class.
[ "# ObjectNet (720P Shortest Edge)\n\nA webp (lossless) encoded version of ObjectNet-1.0 resized to shortest edge = 720 pixels.", "## License / Usage Terms\n\nObjectNet is free to use for both research and commercial applications. The authors own the source images and allow their use under a license derived from Creative Commons Attribution 4.0 with only two additional clauses.\n\n1. ObjectNet may never be used to tune the parameters of any model.\n2. Any individual images from ObjectNet may only be posted to the web including their 1 pixel red border.\nIf you are using ObjectNet, please cite our work, the citation appears at the bottom of this page. Any derivative of ObjectNet must contain attribution as well.", "## About\n\nWhat is ObjectNet?\n* A new kind of vision dataset borrowing the idea of controls from other areas of science.\n* No training set, only a test set! Put your vision system through its paces.\n* Collected to intentionally show objects from new viewpoints on new backgrounds.\n* 50,000 image test set, same as ImageNet, with controls for rotation, background, and viewpoint.\n* 313 object classes with 113 overlapping ImageNet\n* Large performance drop, what you can expect from vision systems in the real world!\n* Robust to fine-tuning and a very difficult transfer learning problem", "## Why the Red Borders / How do I recognize if an image is in ObjectNet?\n\nAs training sets become huge, the risk that test and training sets overlap is serious. We provide ObjectNet with a 2 pixel red border around each image which must be removed before performing inference. The ObjectNet license requires that if you post images from ObjectNet to the web, you include this border. Any time you see an image with a solid 2 pixel red border, that's an indication it's in someone's test set and you should be careful about training on it. Reverse image search will allow you to figure out which test set it is from.\n\nNOTE: original ObjectNet PNG files actually have a 2 pixel red border while their descriptions say 1.", "## Preprocessing Steps for This timm Version\n1. Remove 2-pixel red border.\n2. Resize images to shortest edge = 720 pixels (if shortest edge larger than this).\n3. Add back 1-pixel red border.\n4. Re-encode PNG images with lossless WebP. In total, ~76% of original size.\n5. Add 'imagenet_labels' and 'imagenet_synsets' consisting of lists of ImageNet-1k classes that overlap with ObjectNet class." ]
[ "TAGS\n#region-us \n", "# ObjectNet (720P Shortest Edge)\n\nA webp (lossless) encoded version of ObjectNet-1.0 resized to shortest edge = 720 pixels.", "## License / Usage Terms\n\nObjectNet is free to use for both research and commercial applications. The authors own the source images and allow their use under a license derived from Creative Commons Attribution 4.0 with only two additional clauses.\n\n1. ObjectNet may never be used to tune the parameters of any model.\n2. Any individual images from ObjectNet may only be posted to the web including their 1 pixel red border.\nIf you are using ObjectNet, please cite our work, the citation appears at the bottom of this page. Any derivative of ObjectNet must contain attribution as well.", "## About\n\nWhat is ObjectNet?\n* A new kind of vision dataset borrowing the idea of controls from other areas of science.\n* No training set, only a test set! Put your vision system through its paces.\n* Collected to intentionally show objects from new viewpoints on new backgrounds.\n* 50,000 image test set, same as ImageNet, with controls for rotation, background, and viewpoint.\n* 313 object classes with 113 overlapping ImageNet\n* Large performance drop, what you can expect from vision systems in the real world!\n* Robust to fine-tuning and a very difficult transfer learning problem", "## Why the Red Borders / How do I recognize if an image is in ObjectNet?\n\nAs training sets become huge, the risk that test and training sets overlap is serious. We provide ObjectNet with a 2 pixel red border around each image which must be removed before performing inference. The ObjectNet license requires that if you post images from ObjectNet to the web, you include this border. Any time you see an image with a solid 2 pixel red border, that's an indication it's in someone's test set and you should be careful about training on it. Reverse image search will allow you to figure out which test set it is from.\n\nNOTE: original ObjectNet PNG files actually have a 2 pixel red border while their descriptions say 1.", "## Preprocessing Steps for This timm Version\n1. Remove 2-pixel red border.\n2. Resize images to shortest edge = 720 pixels (if shortest edge larger than this).\n3. Add back 1-pixel red border.\n4. Re-encode PNG images with lossless WebP. In total, ~76% of original size.\n5. Add 'imagenet_labels' and 'imagenet_synsets' consisting of lists of ImageNet-1k classes that overlap with ObjectNet class." ]
[ 6, 38, 120, 136, 162, 110 ]
[ "passage: TAGS\n#region-us \n# ObjectNet (720P Shortest Edge)\n\nA webp (lossless) encoded version of ObjectNet-1.0 resized to shortest edge = 720 pixels.## License / Usage Terms\n\nObjectNet is free to use for both research and commercial applications. The authors own the source images and allow their use under a license derived from Creative Commons Attribution 4.0 with only two additional clauses.\n\n1. ObjectNet may never be used to tune the parameters of any model.\n2. Any individual images from ObjectNet may only be posted to the web including their 1 pixel red border.\nIf you are using ObjectNet, please cite our work, the citation appears at the bottom of this page. Any derivative of ObjectNet must contain attribution as well.## About\n\nWhat is ObjectNet?\n* A new kind of vision dataset borrowing the idea of controls from other areas of science.\n* No training set, only a test set! Put your vision system through its paces.\n* Collected to intentionally show objects from new viewpoints on new backgrounds.\n* 50,000 image test set, same as ImageNet, with controls for rotation, background, and viewpoint.\n* 313 object classes with 113 overlapping ImageNet\n* Large performance drop, what you can expect from vision systems in the real world!\n* Robust to fine-tuning and a very difficult transfer learning problem## Why the Red Borders / How do I recognize if an image is in ObjectNet?\n\nAs training sets become huge, the risk that test and training sets overlap is serious. We provide ObjectNet with a 2 pixel red border around each image which must be removed before performing inference. The ObjectNet license requires that if you post images from ObjectNet to the web, you include this border. Any time you see an image with a solid 2 pixel red border, that's an indication it's in someone's test set and you should be careful about training on it. Reverse image search will allow you to figure out which test set it is from.\n\nNOTE: original ObjectNet PNG files actually have a 2 pixel red border while their descriptions say 1." ]
5e995eb90503c3b457e33e57bbebc2e4829fbd44
A so-so repository, and the model, to be honest, is not great either. <div align="center"> <a href="https://www.youtube.com/@Nostoro"> <img src="https://huggingface.co/datasets/tomber0/mc-Nasos/resolve/main/underrailicon1.png" /><br> </a> </div>
tomber0/mc-Nasos
[ "license:mit", "region:us" ]
2023-12-17T02:23:39+00:00
{"license": "mit"}
2023-12-17T18:28:54+00:00
[]
[]
TAGS #license-mit #region-us
A so-so repository, and the model, to be honest, is not great either. <div align="center"> <a href="URL <img src="URL /><br> </a> </div>
[]
[ "TAGS\n#license-mit #region-us \n" ]
[ 11 ]
[ "passage: TAGS\n#license-mit #region-us \n" ]
a1107124e0178d5683a617965a405df6e6284349
# ObjectNet (720P Shortest Edge, ImageNet-1k Overlap) A webp (lossless) encoded version of [ObjectNet-1.0](https://objectnet.dev/index.html) resized to shortest edge = 720 pixels. Containing only the 113 classes that overlap with ImageNet-1k. ## License / Usage Terms ObjectNet is free to use for both research and commercial applications. The authors own the source images and allow their use under a license derived from Creative Commons Attribution 4.0 with only two additional clauses. 1. **ObjectNet may never be used to tune the parameters of any model.** 2. **Any individual images from ObjectNet may only be posted to the web including their 1 pixel red border**. If you are using ObjectNet, please cite our work, the citation appears at the bottom of this page. Any derivative of ObjectNet must contain attribution as well. ## About What is ObjectNet? * A new kind of vision dataset borrowing the idea of controls from other areas of science. * No training set, only a test set! Put your vision system through its paces. * Collected to intentionally show objects from new viewpoints on new backgrounds. * 50,000 image test set, same as ImageNet, with controls for rotation, background, and viewpoint. * 313 object classes with 113 overlapping ImageNet * Large performance drop, what you can expect from vision systems in the real world! * Robust to fine-tuning and a very difficult transfer learning problem ## Why the Red Borders / How do I recognize if an image is in ObjectNet? As training sets become huge, the risk that test and training sets overlap is serious. We provide ObjectNet with a 2 pixel red border around each image which must be removed before performing inference. The ObjectNet license requires that if you post images from ObjectNet to the web, you include this border. Any time you see an image with a solid 2 pixel red border, that's an indication it's in someone's test set and you should be careful about training on it. Reverse image search will allow you to figure out which test set it is from. NOTE: original ObjectNet PNG files actually have a 2 pixel red border while their descriptions say 1. ## Preprocessing Steps for This timm Version 1. Remove 2-pixel red border. 2. Resize images to shortest edge = 720 pixels (if shortest edge larger than this). 3. Add back 1-pixel red border. 4. Re-encode PNG images with lossless WebP. In total, ~76% of original size. 6. Add `imagenet_labels` and `imagenet_synsets` consisting of lists of ImageNet-1k classes that overlap with ObjectNet class. 7. Filter out classes that do not overlap with ImageNet-1k. ## Citation ```bibtex @incollection{NIPS2019_9142, title = {ObjectNet: A large-scale bias-controlled dataset for pushing the limits of object recognition models}, author = {Barbu, Andrei and Mayo, David and Alverio, Julian and Luo, William and Wang, Christopher and Gutfreund, Dan and Tenenbaum, Josh and Katz, Boris}, booktitle = {Advances in Neural Information Processing Systems 32}, editor = {H. Wallach and H. Larochelle and A. Beygelzimer and F. d\textquotesingle Alch\'{e}-Buc and E. Fox and R. Garnett}, pages = {9448--9458}, year = {2019}, publisher = {Curran Associates, Inc.}, url = {http://papers.nips.cc/paper/9142-objectnet-a-large-scale-bias-controlled-dataset-for-pushing-the-limits-of-object-recognition-models.pdf} } ```
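As a hedged sketch of how the `imagenet_labels` field might be used to score an ImageNet-1k classifier (an assumption, not an official evaluation recipe): count a prediction as correct when the predicted class index appears in the example's `imagenet_labels` list, since several ImageNet-1k classes can map to a single ObjectNet class. The classifier call below is hypothetical.

```python
# Hedged evaluation sketch (an assumption, not an official recipe): a prediction
# counts as correct when the predicted ImageNet-1k class index appears in the
# example's `imagenet_labels` list, since several ImageNet-1k classes can map
# to a single ObjectNet class.
from datasets import load_dataset

ds = load_dataset("timm/objectnet-720p-in1k", split="test", streaming=True)

correct, total = 0, 0
for example in ds:
    img = example["image"]
    img = img.crop((1, 1, img.width - 1, img.height - 1))  # drop the 1-pixel red border
    pred = my_imagenet1k_classifier(img)  # hypothetical: returns a class index in 0..999
    correct += int(pred in example["imagenet_labels"])
    total += 1

print(f"Top-1 on the ImageNet-1k overlap subset: {correct / total:.4f}")
```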
timm/objectnet-720p-in1k
[ "region:us" ]
2023-12-17T02:31:27+00:00
{"dataset_info": {"features": [{"name": "image", "dtype": "image"}, {"name": "label", "dtype": {"class_label": {"names": {"0": "air_freshener", "1": "alarm_clock", "2": "backpack", "3": "baking_sheet", "4": "banana", "5": "band_aid", "6": "baseball_bat", "7": "baseball_glove", "8": "basket", "9": "bathrobe", "10": "battery", "11": "bed_sheet", "12": "beer_bottle", "13": "beer_can", "14": "belt", "15": "bench", "16": "bicycle", "17": "bike_pump", "18": "bills_money", "19": "binder_closed", "20": "biscuits", "21": "blanket", "22": "blender", "23": "blouse", "24": "board_game", "25": "book_closed", "26": "bookend", "27": "boots", "28": "bottle_cap", "29": "bottle_opener", "30": "bottle_stopper", "31": "box", "32": "bracelet", "33": "bread_knife", "34": "bread_loaf", "35": "briefcase", "36": "brooch", "37": "broom", "38": "bucket", "39": "butchers_knife", "40": "butter", "41": "button", "42": "calendar", "43": "can_opener", "44": "candle", "45": "canned_food", "46": "cd_case", "47": "cellphone", "48": "cellphone_case", "49": "cellphone_charger", "50": "cereal", "51": "chair", "52": "cheese", "53": "chess_piece", "54": "chocolate", "55": "chopstick", "56": "clothes_hamper", "57": "clothes_hanger", "58": "coaster", "59": "coffee_beans", "60": "coffee_french_press", "61": "coffee_grinder", "62": "coffee_machine", "63": "coffee_table", "64": "coin_money", "65": "comb", "66": "combination_lock", "67": "computer_mouse", "68": "contact_lens_case", "69": "cooking_oil_bottle", "70": "cork", "71": "cutting_board", "72": "deodorant", "73": "desk_lamp", "74": "detergent", "75": "dish_soap", "76": "document_folder_closed", "77": "dog_bed", "78": "doormat", "79": "drawer_open", "80": "dress", "81": "dress_pants", "82": "dress_shirt", "83": "dress_shoe_men", "84": "dress_shoe_women", "85": "drill", "86": "drinking_cup", "87": "drinking_straw", "88": "drying_rack_for_clothes", "89": "drying_rack_for_dishes", "90": "dust_pan", "91": "dvd_player", "92": "earbuds", "93": "earring", "94": "egg", "95": "egg_carton", "96": "envelope", "97": "eraser_white_board", "98": "extension_cable", "99": "eyeglasses", "100": "fan", "101": "figurine_or_statue", "102": "first_aid_kit", "103": "flashlight", "104": "floss_container", "105": "flour_container", "106": "fork", "107": "frying_pan", "108": "full_sized_towel", "109": "glue_container", "110": "hair_brush", "111": "hair_dryer", "112": "hairclip", "113": "hairtie", "114": "hammer", "115": "hand_mirror", "116": "hand_towel_or_rag", "117": "handbag", "118": "hat", "119": "headphones_over_ear", "120": "helmet", "121": "honey_container", "122": "ice", "123": "ice_cube_tray", "124": "iron_for_clothes", "125": "ironing_board", "126": "jam", "127": "jar", "128": "jeans", "129": "kettle", "130": "key_chain", "131": "keyboard", "132": "ladle", "133": "lampshade", "134": "laptop_charger", "135": "laptop_open", "136": "leaf", "137": "leggings", "138": "lemon", "139": "letter_opener", "140": "lettuce", "141": "light_bulb", "142": "lighter", "143": "lipstick", "144": "loofah", "145": "magazine", "146": "makeup", "147": "makeup_brush", "148": "marker", "149": "match", "150": "measuring_cup", "151": "microwave", "152": "milk", "153": "mixing_salad_bowl", "154": "monitor", "155": "mouse_pad", "156": "mouthwash", "157": "mug", "158": "multitool", "159": "nail_clippers", "160": "nail_fastener", "161": "nail_file", "162": "nail_polish", "163": "napkin", "164": "necklace", "165": "newspaper", "166": "night_light", "167": "nightstand", "168": "notebook", "169": "notepad", "170": 
"nut_for_screw", "171": "orange", "172": "oven_mitts", "173": "padlock", "174": "paint_can", "175": "paintbrush", "176": "paper", "177": "paper_bag", "178": "paper_plates", "179": "paper_towel", "180": "paperclip", "181": "peeler", "182": "pen", "183": "pencil", "184": "pepper_shaker", "185": "pet_food_container", "186": "phone_landline", "187": "photograph_printed", "188": "pill_bottle", "189": "pill_organizer", "190": "pillow", "191": "pitcher", "192": "placemat", "193": "plastic_bag", "194": "plastic_cup", "195": "plastic_wrap", "196": "plate", "197": "playing_cards", "198": "pliers", "199": "plunger", "200": "pop_can", "201": "portable_heater", "202": "poster", "203": "power_bar", "204": "power_cable", "205": "printer", "206": "raincoat", "207": "rake", "208": "razor", "209": "receipt", "210": "remote_control", "211": "removable_blade", "212": "ribbon", "213": "ring", "214": "rock", "215": "rolling_pin", "216": "ruler", "217": "running_shoe", "218": "safety_pin", "219": "salt_shaker", "220": "sandal", "221": "scarf", "222": "scissors", "223": "screw", "224": "scrub_brush", "225": "sewing_kit", "226": "shampoo_bottle", "227": "shoelace", "228": "shorts", "229": "shovel", "230": "skateboard", "231": "skirt", "232": "sleeping_bag", "233": "slipper", "234": "soap_bar", "235": "soap_dispenser", "236": "sock", "237": "soup_bowl", "238": "spatula", "239": "speaker", "240": "sponge", "241": "spoon", "242": "spray_bottle", "243": "squeegee", "244": "squeeze_bottle", "245": "standing_lamp", "246": "stapler", "247": "step_stool", "248": "still_camera", "249": "stopper_sink_tub", "250": "strainer", "251": "stuffed_animal", "252": "sugar_container", "253": "suit_jacket", "254": "suitcase", "255": "sunglasses", "256": "sweater", "257": "swimming_trunks", "258": "t-shirt", "259": "table_knife", "260": "tablecloth", "261": "tablet_ipad", "262": "tanktop", "263": "tape", "264": "tape_measure", "265": "tarp", "266": "teabag", "267": "teapot", "268": "tennis_racket", "269": "thermometer", "270": "thermos", "271": "throw_pillow", "272": "tie", "273": "tissue", "274": "toaster", "275": "toilet_paper_roll", "276": "tomato", "277": "tongs", "278": "toothbrush", "279": "toothpaste", "280": "tote_bag", "281": "toy", "282": "trash_bag", "283": "trash_bin", "284": "travel_case", "285": "tray", "286": "trophy", "287": "tv", "288": "tweezers", "289": "umbrella", "290": "usb_cable", "291": "usb_flash_drive", "292": "vacuum_cleaner", "293": "vase", "294": "video_camera", "295": "walker", "296": "walking_cane", "297": "wallet", "298": "watch", "299": "water_bottle", "300": "water_filter", "301": "webcam", "302": "weight_exercise", "303": "weight_scale", "304": "wheel", "305": "whisk", "306": "whistle", "307": "wine_bottle", "308": "wine_glass", "309": "winter_glove", "310": "wok", "311": "wrench", "312": "ziploc_bag"}}}}, {"name": "imagenet_labels", "sequence": "int64"}, {"name": "imagenet_synsets", "sequence": "string"}, {"name": "image_id", "dtype": "string"}], "splits": [{"name": "test", "num_bytes": 16852062163.588987, "num_examples": 18574}], "download_size": 16845810545, "dataset_size": 16852062163.588987}, "configs": [{"config_name": "default", "data_files": [{"split": "test", "path": "data/test-*"}]}], "extra_gated_prompt": "By clicking on \u201cAccess repository\u201d below, you also agree to ObjectNet Terms: ObjectNet is free to use for both research and commercial applications. 
The authors own the source images and allow their use under a license derived from Creative Commons Attribution 4.0 with only two additional clauses.\n1. ObjectNet may never be used to tune the parameters of any model.\n2. Any individual images from ObjectNet may only be posted to the web including their 1 pixel red border.\nIf you are using ObjectNet, please cite our work, the citation appears at the bottom of this page. Any derivative of ObjectNet must contain attribution as well."}
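The feature schema in the metadata above stores the ObjectNet category as a `ClassLabel` named `label`, plus `imagenet_labels` / `imagenet_synsets` sequences giving the overlapping ImageNet-1k classes. As a minimal sketch (not part of the original card, and using only the first three of the 313 class names for brevity), this is how such a `ClassLabel` feature converts between integer ids and the names listed:

```python
from datasets import ClassLabel

# Only the first three of the 313 ObjectNet class names, for illustration.
label_feature = ClassLabel(names=["air_freshener", "alarm_clock", "backpack"])

print(label_feature.int2str(2))              # -> "backpack"
print(label_feature.str2int("alarm_clock"))  # -> 1
```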
2023-12-17T04:24:05+00:00
[]
[]
TAGS #region-us
# ObjectNet (720P Shortest Edge, ImageNet-1k Overlap) A WebP (lossless) encoded version of ObjectNet-1.0 resized to shortest edge = 720 pixels. It contains only the 113 classes that overlap with ImageNet-1k. ## License / Usage Terms ObjectNet is free to use for both research and commercial applications. The authors own the source images and allow their use under a license derived from Creative Commons Attribution 4.0 with only two additional clauses. 1. ObjectNet may never be used to tune the parameters of any model. 2. Any individual images from ObjectNet may only be posted to the web including their 1 pixel red border. If you are using ObjectNet, please cite our work; the citation appears at the bottom of this page. Any derivative of ObjectNet must contain attribution as well. ## About What is ObjectNet? * A new kind of vision dataset borrowing the idea of controls from other areas of science. * No training set, only a test set! Put your vision system through its paces. * Collected to intentionally show objects from new viewpoints on new backgrounds. * A 50,000 image test set, the same size as ImageNet's, with controls for rotation, background, and viewpoint. * 313 object classes, 113 of which overlap with ImageNet-1k. * A large performance drop, showing what you can expect from vision systems in the real world! * Robust to fine-tuning and a very difficult transfer learning problem. ## Why the Red Borders / How do I recognize if an image is in ObjectNet? As training sets become huge, the risk that test and training sets overlap is serious. We provide ObjectNet with a 2 pixel red border around each image which must be removed before performing inference. The ObjectNet license requires that if you post images from ObjectNet to the web, you include this border. Any time you see an image with a solid 2 pixel red border, that's an indication it's in someone's test set and you should be careful about training on it. Reverse image search will allow you to figure out which test set it is from. NOTE: the original ObjectNet PNG files actually have a 2 pixel red border while their descriptions say 1 pixel. ## Preprocessing Steps for This timm Version 1. Remove the 2-pixel red border. 2. Resize images to shortest edge = 720 pixels (if the shortest edge is larger than this). 3. Add back a 1-pixel red border. 4. Re-encode the PNG images as lossless WebP; in total, the result is ~76% of the original size. 5. Add 'imagenet_labels' and 'imagenet_synsets' columns consisting of lists of the ImageNet-1k classes that overlap with each ObjectNet class. 6. Filter out classes that do not overlap with ImageNet-1k.
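The card above says the red border must be removed before running inference (1 pixel in this resized version, 2 pixels in the original PNGs). As a minimal, hypothetical sketch assuming Pillow images such as those returned by the `image` feature, the crop could look like this:

```python
from PIL import Image

def strip_red_border(img: Image.Image, border: int = 1) -> Image.Image:
    # Crop `border` pixels from every side; use border=1 for this resized
    # version and border=2 for the original ObjectNet PNGs.
    w, h = img.size
    return img.crop((border, border, w - border, h - border))

# Tiny self-contained demo on a synthetic solid-red image.
demo = Image.new("RGB", (720, 540), (255, 0, 0))
print(strip_red_border(demo).size)  # (718, 538)
```

Remember that the license still requires the border to be present on any ObjectNet images posted to the web; the crop is only for inference.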
[ "# ObjectNet (720P Shortest Edge, ImageNet-1k Overlap)\n\nA webp (lossless) encoded version of ObjectNet-1.0 resized to shortest edge = 720 pixels. Containing only the 113 classes that overlap with ImageNet-1k.", "## License / Usage Terms\n\nObjectNet is free to use for both research and commercial applications. The authors own the source images and allow their use under a license derived from Creative Commons Attribution 4.0 with only two additional clauses.\n\n1. ObjectNet may never be used to tune the parameters of any model.\n2. Any individual images from ObjectNet may only be posted to the web including their 1 pixel red border.\nIf you are using ObjectNet, please cite our work, the citation appears at the bottom of this page. Any derivative of ObjectNet must contain attribution as well.", "## About\n\nWhat is ObjectNet?\n* A new kind of vision dataset borrowing the idea of controls from other areas of science.\n* No training set, only a test set! Put your vision system through its paces.\n* Collected to intentionally show objects from new viewpoints on new backgrounds.\n* 50,000 image test set, same as ImageNet, with controls for rotation, background, and viewpoint.\n* 313 object classes with 113 overlapping ImageNet\n* Large performance drop, what you can expect from vision systems in the real world!\n* Robust to fine-tuning and a very difficult transfer learning problem", "## Why the Red Borders / How do I recognize if an image is in ObjectNet?\n\nAs training sets become huge, the risk that test and training sets overlap is serious. We provide ObjectNet with a 2 pixel red border around each image which must be removed before performing inference. The ObjectNet license requires that if you post images from ObjectNet to the web, you include this border. Any time you see an image with a solid 2 pixel red border, that's an indication it's in someone's test set and you should be careful about training on it. Reverse image search will allow you to figure out which test set it is from.\n\nNOTE: original ObjectNet PNG files actually have a 2 pixel red border while their descriptions say 1.", "## Preprocessing Steps for This timm Version\n1. Remove 2-pixel red border.\n2. Resize images to shortest edge = 720 pixels (if shortest edge larger than this).\n3. Add back 1-pixel red border.\n4. Re-encode PNG images with lossless WebP. In total, ~76% of original size.\n6. Add 'imagenet_labels' and 'imagenet_synsets' consisting of lists of ImageNet-1k classes that overlap with ObjectNet class.\n7. Filter out classes that do not overlap with ImageNet-1k." ]
[ "TAGS\n#region-us \n", "# ObjectNet (720P Shortest Edge, ImageNet-1k Overlap)\n\nA webp (lossless) encoded version of ObjectNet-1.0 resized to shortest edge = 720 pixels. Containing only the 113 classes that overlap with ImageNet-1k.", "## License / Usage Terms\n\nObjectNet is free to use for both research and commercial applications. The authors own the source images and allow their use under a license derived from Creative Commons Attribution 4.0 with only two additional clauses.\n\n1. ObjectNet may never be used to tune the parameters of any model.\n2. Any individual images from ObjectNet may only be posted to the web including their 1 pixel red border.\nIf you are using ObjectNet, please cite our work, the citation appears at the bottom of this page. Any derivative of ObjectNet must contain attribution as well.", "## About\n\nWhat is ObjectNet?\n* A new kind of vision dataset borrowing the idea of controls from other areas of science.\n* No training set, only a test set! Put your vision system through its paces.\n* Collected to intentionally show objects from new viewpoints on new backgrounds.\n* 50,000 image test set, same as ImageNet, with controls for rotation, background, and viewpoint.\n* 313 object classes with 113 overlapping ImageNet\n* Large performance drop, what you can expect from vision systems in the real world!\n* Robust to fine-tuning and a very difficult transfer learning problem", "## Why the Red Borders / How do I recognize if an image is in ObjectNet?\n\nAs training sets become huge, the risk that test and training sets overlap is serious. We provide ObjectNet with a 2 pixel red border around each image which must be removed before performing inference. The ObjectNet license requires that if you post images from ObjectNet to the web, you include this border. Any time you see an image with a solid 2 pixel red border, that's an indication it's in someone's test set and you should be careful about training on it. Reverse image search will allow you to figure out which test set it is from.\n\nNOTE: original ObjectNet PNG files actually have a 2 pixel red border while their descriptions say 1.", "## Preprocessing Steps for This timm Version\n1. Remove 2-pixel red border.\n2. Resize images to shortest edge = 720 pixels (if shortest edge larger than this).\n3. Add back 1-pixel red border.\n4. Re-encode PNG images with lossless WebP. In total, ~76% of original size.\n6. Add 'imagenet_labels' and 'imagenet_synsets' consisting of lists of ImageNet-1k classes that overlap with ObjectNet class.\n7. Filter out classes that do not overlap with ImageNet-1k." ]
[ 6, 60, 120, 136, 162, 125 ]
[ "passage: TAGS\n#region-us \n# ObjectNet (720P Shortest Edge, ImageNet-1k Overlap)\n\nA webp (lossless) encoded version of ObjectNet-1.0 resized to shortest edge = 720 pixels. Containing only the 113 classes that overlap with ImageNet-1k.## License / Usage Terms\n\nObjectNet is free to use for both research and commercial applications. The authors own the source images and allow their use under a license derived from Creative Commons Attribution 4.0 with only two additional clauses.\n\n1. ObjectNet may never be used to tune the parameters of any model.\n2. Any individual images from ObjectNet may only be posted to the web including their 1 pixel red border.\nIf you are using ObjectNet, please cite our work, the citation appears at the bottom of this page. Any derivative of ObjectNet must contain attribution as well.## About\n\nWhat is ObjectNet?\n* A new kind of vision dataset borrowing the idea of controls from other areas of science.\n* No training set, only a test set! Put your vision system through its paces.\n* Collected to intentionally show objects from new viewpoints on new backgrounds.\n* 50,000 image test set, same as ImageNet, with controls for rotation, background, and viewpoint.\n* 313 object classes with 113 overlapping ImageNet\n* Large performance drop, what you can expect from vision systems in the real world!\n* Robust to fine-tuning and a very difficult transfer learning problem## Why the Red Borders / How do I recognize if an image is in ObjectNet?\n\nAs training sets become huge, the risk that test and training sets overlap is serious. We provide ObjectNet with a 2 pixel red border around each image which must be removed before performing inference. The ObjectNet license requires that if you post images from ObjectNet to the web, you include this border. Any time you see an image with a solid 2 pixel red border, that's an indication it's in someone's test set and you should be careful about training on it. Reverse image search will allow you to figure out which test set it is from.\n\nNOTE: original ObjectNet PNG files actually have a 2 pixel red border while their descriptions say 1." ]
a60ff0f0523957754036dd508d8070d8ac5a86ff
# Dataset Card for Evaluation run of vihangd/neuralfalcon-1b-v1 <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [vihangd/neuralfalcon-1b-v1](https://huggingface.co/vihangd/neuralfalcon-1b-v1) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_vihangd__neuralfalcon-1b-v1", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-05T10:25:35.240095](https://huggingface.co/datasets/open-llm-leaderboard/details_vihangd__neuralfalcon-1b-v1/blob/main/results_2024-01-05T10-25-35.240095.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each one in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.25861610359020504, "acc_stderr": 0.03075198856694998, "acc_norm": 0.25951706599329194, "acc_norm_stderr": 0.03155063664627427, "mc1": 0.23745410036719705, "mc1_stderr": 0.014896277441041852, "mc2": 0.49031550463342244, "mc2_stderr": 0.01671675532033107 }, "harness|arc:challenge|25": { "acc": 0.2226962457337884, "acc_stderr": 0.012158314774829931, "acc_norm": 0.2636518771331058, "acc_norm_stderr": 0.01287592915129707 }, "harness|hellaswag|10": { "acc": 0.26160127464648475, "acc_stderr": 0.0043860836838396184, "acc_norm": 0.26558454491137223, "acc_norm_stderr": 0.004407413723383401 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.21, "acc_stderr": 0.040936018074033256, "acc_norm": 0.21, "acc_norm_stderr": 0.040936018074033256 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.22962962962962963, "acc_stderr": 0.03633384414073462, "acc_norm": 0.22962962962962963, "acc_norm_stderr": 0.03633384414073462 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.3355263157894737, "acc_stderr": 0.03842498559395269, "acc_norm": 0.3355263157894737, "acc_norm_stderr": 0.03842498559395269 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.25, "acc_stderr": 0.04351941398892446, "acc_norm": 0.25, "acc_norm_stderr": 0.04351941398892446 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.2037735849056604, "acc_stderr": 0.024790784501775402, "acc_norm": 0.2037735849056604, "acc_norm_stderr": 0.024790784501775402 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.2638888888888889, "acc_stderr": 0.03685651095897532, "acc_norm": 0.2638888888888889, "acc_norm_stderr": 0.03685651095897532 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.4, "acc_stderr": 0.049236596391733084, "acc_norm": 0.4, "acc_norm_stderr": 0.049236596391733084 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.34, "acc_stderr": 0.04760952285695236, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695236 },
"harness|hendrycksTest-college_mathematics|5": { "acc": 0.29, "acc_stderr": 0.045604802157206824, "acc_norm": 0.29, "acc_norm_stderr": 0.045604802157206824 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.24855491329479767, "acc_stderr": 0.03295304696818318, "acc_norm": 0.24855491329479767, "acc_norm_stderr": 0.03295304696818318 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.3137254901960784, "acc_stderr": 0.04617034827006718, "acc_norm": 0.3137254901960784, "acc_norm_stderr": 0.04617034827006718 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.22, "acc_stderr": 0.041633319989322695, "acc_norm": 0.22, "acc_norm_stderr": 0.041633319989322695 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.20851063829787234, "acc_stderr": 0.026556982117838728, "acc_norm": 0.20851063829787234, "acc_norm_stderr": 0.026556982117838728 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.22807017543859648, "acc_stderr": 0.03947152782669415, "acc_norm": 0.22807017543859648, "acc_norm_stderr": 0.03947152782669415 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.20689655172413793, "acc_stderr": 0.03375672449560553, "acc_norm": 0.20689655172413793, "acc_norm_stderr": 0.03375672449560553 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.26455026455026454, "acc_stderr": 0.022717467897708607, "acc_norm": 0.26455026455026454, "acc_norm_stderr": 0.022717467897708607 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.373015873015873, "acc_stderr": 0.04325506042017086, "acc_norm": 0.373015873015873, "acc_norm_stderr": 0.04325506042017086 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.19, "acc_stderr": 0.03942772444036623, "acc_norm": 0.19, "acc_norm_stderr": 0.03942772444036623 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.3161290322580645, "acc_stderr": 0.02645087448904277, "acc_norm": 0.3161290322580645, "acc_norm_stderr": 0.02645087448904277 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.23645320197044334, "acc_stderr": 0.02989611429173355, "acc_norm": 0.23645320197044334, "acc_norm_stderr": 0.02989611429173355 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.14, "acc_stderr": 0.03487350880197771, "acc_norm": 0.14, "acc_norm_stderr": 0.03487350880197771 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.2545454545454545, "acc_stderr": 0.03401506715249039, "acc_norm": 0.2545454545454545, "acc_norm_stderr": 0.03401506715249039 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.3484848484848485, "acc_stderr": 0.033948539651564025, "acc_norm": 0.3484848484848485, "acc_norm_stderr": 0.033948539651564025 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.27979274611398963, "acc_stderr": 0.032396370467357036, "acc_norm": 0.27979274611398963, "acc_norm_stderr": 0.032396370467357036 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.3435897435897436, "acc_stderr": 0.024078696580635467, "acc_norm": 0.3435897435897436, "acc_norm_stderr": 0.024078696580635467 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.27037037037037037, "acc_stderr": 0.02708037281514566, "acc_norm": 0.27037037037037037, "acc_norm_stderr": 0.02708037281514566 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.31512605042016806, "acc_stderr": 0.030176808288974337, "acc_norm": 0.31512605042016806, "acc_norm_stderr": 0.030176808288974337 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.31125827814569534, 
"acc_stderr": 0.03780445850526733, "acc_norm": 0.31125827814569534, "acc_norm_stderr": 0.03780445850526733 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.3467889908256881, "acc_stderr": 0.020406097104093027, "acc_norm": 0.3467889908256881, "acc_norm_stderr": 0.020406097104093027 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.4675925925925926, "acc_stderr": 0.03402801581358966, "acc_norm": 0.4675925925925926, "acc_norm_stderr": 0.03402801581358966 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.25980392156862747, "acc_stderr": 0.030778554678693257, "acc_norm": 0.25980392156862747, "acc_norm_stderr": 0.030778554678693257 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.2616033755274262, "acc_stderr": 0.028609516716994934, "acc_norm": 0.2616033755274262, "acc_norm_stderr": 0.028609516716994934 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.10762331838565023, "acc_stderr": 0.020799400082879997, "acc_norm": 0.10762331838565023, "acc_norm_stderr": 0.020799400082879997 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.32061068702290074, "acc_stderr": 0.04093329229834278, "acc_norm": 0.32061068702290074, "acc_norm_stderr": 0.04093329229834278 }, "harness|hendrycksTest-international_law|5": { "acc": 0.14049586776859505, "acc_stderr": 0.0317223342600216, "acc_norm": 0.14049586776859505, "acc_norm_stderr": 0.0317223342600216 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.26851851851851855, "acc_stderr": 0.04284467968052191, "acc_norm": 0.26851851851851855, "acc_norm_stderr": 0.04284467968052191 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.22085889570552147, "acc_stderr": 0.032591773927421776, "acc_norm": 0.22085889570552147, "acc_norm_stderr": 0.032591773927421776 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.23214285714285715, "acc_stderr": 0.040073418097558065, "acc_norm": 0.23214285714285715, "acc_norm_stderr": 0.040073418097558065 }, "harness|hendrycksTest-management|5": { "acc": 0.3786407766990291, "acc_stderr": 0.04802694698258972, "acc_norm": 0.3786407766990291, "acc_norm_stderr": 0.04802694698258972 }, "harness|hendrycksTest-marketing|5": { "acc": 0.24786324786324787, "acc_stderr": 0.028286324075564407, "acc_norm": 0.24786324786324787, "acc_norm_stderr": 0.028286324075564407 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.26, "acc_stderr": 0.04408440022768077, "acc_norm": 0.26, "acc_norm_stderr": 0.04408440022768077 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.20434227330779056, "acc_stderr": 0.0144191239809319, "acc_norm": 0.20434227330779056, "acc_norm_stderr": 0.0144191239809319 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.2630057803468208, "acc_stderr": 0.023703099525258165, "acc_norm": 0.2630057803468208, "acc_norm_stderr": 0.023703099525258165 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.27262569832402234, "acc_stderr": 0.014893391735249588, "acc_norm": 0.27262569832402234, "acc_norm_stderr": 0.014893391735249588 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.2875816993464052, "acc_stderr": 0.02591780611714716, "acc_norm": 0.2875816993464052, "acc_norm_stderr": 0.02591780611714716 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.24437299035369775, "acc_stderr": 0.024406162094668882, "acc_norm": 0.24437299035369775, "acc_norm_stderr": 0.024406162094668882 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.2191358024691358, "acc_stderr": 0.023016705640262192, "acc_norm": 0.2191358024691358, "acc_norm_stderr": 
0.023016705640262192 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.2375886524822695, "acc_stderr": 0.025389512552729906, "acc_norm": 0.2375886524822695, "acc_norm_stderr": 0.025389512552729906 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.23468057366362452, "acc_stderr": 0.010824026872449361, "acc_norm": 0.23468057366362452, "acc_norm_stderr": 0.010824026872449361 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.18382352941176472, "acc_stderr": 0.023529242185193106, "acc_norm": 0.18382352941176472, "acc_norm_stderr": 0.023529242185193106 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.2222222222222222, "acc_stderr": 0.016819028375736386, "acc_norm": 0.2222222222222222, "acc_norm_stderr": 0.016819028375736386 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.15454545454545454, "acc_stderr": 0.03462262571262667, "acc_norm": 0.15454545454545454, "acc_norm_stderr": 0.03462262571262667 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.2, "acc_stderr": 0.025607375986579153, "acc_norm": 0.2, "acc_norm_stderr": 0.025607375986579153 }, "harness|hendrycksTest-sociology|5": { "acc": 0.2935323383084577, "acc_stderr": 0.03220024104534205, "acc_norm": 0.2935323383084577, "acc_norm_stderr": 0.03220024104534205 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.25, "acc_stderr": 0.04351941398892446, "acc_norm": 0.25, "acc_norm_stderr": 0.04351941398892446 }, "harness|hendrycksTest-virology|5": { "acc": 0.23493975903614459, "acc_stderr": 0.03300533186128922, "acc_norm": 0.23493975903614459, "acc_norm_stderr": 0.03300533186128922 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.17543859649122806, "acc_stderr": 0.029170885500727654, "acc_norm": 0.17543859649122806, "acc_norm_stderr": 0.029170885500727654 }, "harness|truthfulqa:mc|0": { "mc1": 0.23745410036719705, "mc1_stderr": 0.014896277441041852, "mc2": 0.49031550463342244, "mc2_stderr": 0.01671675532033107 }, "harness|winogrande|5": { "acc": 0.5074980268350434, "acc_stderr": 0.014050905521228568 }, "harness|gsm8k|5": { "acc": 0.001516300227445034, "acc_stderr": 0.0010717793485492642 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. 
news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
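As a supplement to the loading snippet in the card, the sketch below (config and split names taken from this card's metadata; details may differ for other configurations) shows how one of the per-task configurations can be read either at its "latest" split or at a specific timestamped run:

```python
from datasets import load_dataset

repo = "open-llm-leaderboard/details_vihangd__neuralfalcon-1b-v1"

# Newest run for the GSM8K task details.
latest = load_dataset(repo, "harness_gsm8k_5", split="latest")

# The earlier run, addressed by its timestamped split name.
first_run = load_dataset(repo, "harness_gsm8k_5", split="2023_12_17T03_31_54.267536")

print(len(latest), len(first_run))
```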
open-llm-leaderboard/details_vihangd__neuralfalcon-1b-v1
[ "region:us" ]
2023-12-17T03:34:00+00:00
{"pretty_name": "Evaluation run of vihangd/neuralfalcon-1b-v1", "dataset_summary": "Dataset automatically created during the evaluation run of model [vihangd/neuralfalcon-1b-v1](https://huggingface.co/vihangd/neuralfalcon-1b-v1) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_vihangd__neuralfalcon-1b-v1\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-05T10:25:35.240095](https://huggingface.co/datasets/open-llm-leaderboard/details_vihangd__neuralfalcon-1b-v1/blob/main/results_2024-01-05T10-25-35.240095.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.25861610359020504,\n \"acc_stderr\": 0.03075198856694998,\n \"acc_norm\": 0.25951706599329194,\n \"acc_norm_stderr\": 0.03155063664627427,\n \"mc1\": 0.23745410036719705,\n \"mc1_stderr\": 0.014896277441041852,\n \"mc2\": 0.49031550463342244,\n \"mc2_stderr\": 0.01671675532033107\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.2226962457337884,\n \"acc_stderr\": 0.012158314774829931,\n \"acc_norm\": 0.2636518771331058,\n \"acc_norm_stderr\": 0.01287592915129707\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.26160127464648475,\n \"acc_stderr\": 0.0043860836838396184,\n \"acc_norm\": 0.26558454491137223,\n \"acc_norm_stderr\": 0.004407413723383401\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.21,\n \"acc_stderr\": 0.040936018074033256,\n \"acc_norm\": 0.21,\n \"acc_norm_stderr\": 0.040936018074033256\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.22962962962962963,\n \"acc_stderr\": 0.03633384414073462,\n \"acc_norm\": 0.22962962962962963,\n \"acc_norm_stderr\": 0.03633384414073462\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.3355263157894737,\n \"acc_stderr\": 0.03842498559395269,\n \"acc_norm\": 0.3355263157894737,\n \"acc_norm_stderr\": 0.03842498559395269\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.25,\n \"acc_stderr\": 0.04351941398892446,\n \"acc_norm\": 0.25,\n \"acc_norm_stderr\": 0.04351941398892446\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.2037735849056604,\n \"acc_stderr\": 0.024790784501775402,\n \"acc_norm\": 0.2037735849056604,\n \"acc_norm_stderr\": 0.024790784501775402\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.2638888888888889,\n \"acc_stderr\": 0.03685651095897532,\n \"acc_norm\": 0.2638888888888889,\n \"acc_norm_stderr\": 0.03685651095897532\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.4,\n \"acc_stderr\": 0.049236596391733084,\n \"acc_norm\": 
0.4,\n \"acc_norm_stderr\": 0.049236596391733084\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.34,\n \"acc_stderr\": 0.04760952285695236,\n \"acc_norm\": 0.34,\n \"acc_norm_stderr\": 0.04760952285695236\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.29,\n \"acc_stderr\": 0.045604802157206824,\n \"acc_norm\": 0.29,\n \"acc_norm_stderr\": 0.045604802157206824\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.24855491329479767,\n \"acc_stderr\": 0.03295304696818318,\n \"acc_norm\": 0.24855491329479767,\n \"acc_norm_stderr\": 0.03295304696818318\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.3137254901960784,\n \"acc_stderr\": 0.04617034827006718,\n \"acc_norm\": 0.3137254901960784,\n \"acc_norm_stderr\": 0.04617034827006718\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.22,\n \"acc_stderr\": 0.041633319989322695,\n \"acc_norm\": 0.22,\n \"acc_norm_stderr\": 0.041633319989322695\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.20851063829787234,\n \"acc_stderr\": 0.026556982117838728,\n \"acc_norm\": 0.20851063829787234,\n \"acc_norm_stderr\": 0.026556982117838728\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.22807017543859648,\n \"acc_stderr\": 0.03947152782669415,\n \"acc_norm\": 0.22807017543859648,\n \"acc_norm_stderr\": 0.03947152782669415\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.20689655172413793,\n \"acc_stderr\": 0.03375672449560553,\n \"acc_norm\": 0.20689655172413793,\n \"acc_norm_stderr\": 0.03375672449560553\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.26455026455026454,\n \"acc_stderr\": 0.022717467897708607,\n \"acc_norm\": 0.26455026455026454,\n \"acc_norm_stderr\": 0.022717467897708607\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.373015873015873,\n \"acc_stderr\": 0.04325506042017086,\n \"acc_norm\": 0.373015873015873,\n \"acc_norm_stderr\": 0.04325506042017086\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.19,\n \"acc_stderr\": 0.03942772444036623,\n \"acc_norm\": 0.19,\n \"acc_norm_stderr\": 0.03942772444036623\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.3161290322580645,\n \"acc_stderr\": 0.02645087448904277,\n \"acc_norm\": 0.3161290322580645,\n \"acc_norm_stderr\": 0.02645087448904277\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.23645320197044334,\n \"acc_stderr\": 0.02989611429173355,\n \"acc_norm\": 0.23645320197044334,\n \"acc_norm_stderr\": 0.02989611429173355\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.14,\n \"acc_stderr\": 0.03487350880197771,\n \"acc_norm\": 0.14,\n \"acc_norm_stderr\": 0.03487350880197771\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.2545454545454545,\n \"acc_stderr\": 0.03401506715249039,\n \"acc_norm\": 0.2545454545454545,\n \"acc_norm_stderr\": 0.03401506715249039\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.3484848484848485,\n \"acc_stderr\": 0.033948539651564025,\n \"acc_norm\": 0.3484848484848485,\n \"acc_norm_stderr\": 0.033948539651564025\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.27979274611398963,\n \"acc_stderr\": 0.032396370467357036,\n \"acc_norm\": 0.27979274611398963,\n \"acc_norm_stderr\": 0.032396370467357036\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 
0.3435897435897436,\n \"acc_stderr\": 0.024078696580635467,\n \"acc_norm\": 0.3435897435897436,\n \"acc_norm_stderr\": 0.024078696580635467\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.27037037037037037,\n \"acc_stderr\": 0.02708037281514566,\n \"acc_norm\": 0.27037037037037037,\n \"acc_norm_stderr\": 0.02708037281514566\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.31512605042016806,\n \"acc_stderr\": 0.030176808288974337,\n \"acc_norm\": 0.31512605042016806,\n \"acc_norm_stderr\": 0.030176808288974337\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.31125827814569534,\n \"acc_stderr\": 0.03780445850526733,\n \"acc_norm\": 0.31125827814569534,\n \"acc_norm_stderr\": 0.03780445850526733\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.3467889908256881,\n \"acc_stderr\": 0.020406097104093027,\n \"acc_norm\": 0.3467889908256881,\n \"acc_norm_stderr\": 0.020406097104093027\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.4675925925925926,\n \"acc_stderr\": 0.03402801581358966,\n \"acc_norm\": 0.4675925925925926,\n \"acc_norm_stderr\": 0.03402801581358966\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.25980392156862747,\n \"acc_stderr\": 0.030778554678693257,\n \"acc_norm\": 0.25980392156862747,\n \"acc_norm_stderr\": 0.030778554678693257\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.2616033755274262,\n \"acc_stderr\": 0.028609516716994934,\n \"acc_norm\": 0.2616033755274262,\n \"acc_norm_stderr\": 0.028609516716994934\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.10762331838565023,\n \"acc_stderr\": 0.020799400082879997,\n \"acc_norm\": 0.10762331838565023,\n \"acc_norm_stderr\": 0.020799400082879997\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.32061068702290074,\n \"acc_stderr\": 0.04093329229834278,\n \"acc_norm\": 0.32061068702290074,\n \"acc_norm_stderr\": 0.04093329229834278\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.14049586776859505,\n \"acc_stderr\": 0.0317223342600216,\n \"acc_norm\": 0.14049586776859505,\n \"acc_norm_stderr\": 0.0317223342600216\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.26851851851851855,\n \"acc_stderr\": 0.04284467968052191,\n \"acc_norm\": 0.26851851851851855,\n \"acc_norm_stderr\": 0.04284467968052191\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.22085889570552147,\n \"acc_stderr\": 0.032591773927421776,\n \"acc_norm\": 0.22085889570552147,\n \"acc_norm_stderr\": 0.032591773927421776\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.23214285714285715,\n \"acc_stderr\": 0.040073418097558065,\n \"acc_norm\": 0.23214285714285715,\n \"acc_norm_stderr\": 0.040073418097558065\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.3786407766990291,\n \"acc_stderr\": 0.04802694698258972,\n \"acc_norm\": 0.3786407766990291,\n \"acc_norm_stderr\": 0.04802694698258972\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.24786324786324787,\n \"acc_stderr\": 0.028286324075564407,\n \"acc_norm\": 0.24786324786324787,\n \"acc_norm_stderr\": 0.028286324075564407\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.26,\n \"acc_stderr\": 0.04408440022768077,\n \"acc_norm\": 0.26,\n \"acc_norm_stderr\": 0.04408440022768077\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.20434227330779056,\n \"acc_stderr\": 
0.0144191239809319,\n \"acc_norm\": 0.20434227330779056,\n \"acc_norm_stderr\": 0.0144191239809319\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.2630057803468208,\n \"acc_stderr\": 0.023703099525258165,\n \"acc_norm\": 0.2630057803468208,\n \"acc_norm_stderr\": 0.023703099525258165\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.27262569832402234,\n \"acc_stderr\": 0.014893391735249588,\n \"acc_norm\": 0.27262569832402234,\n \"acc_norm_stderr\": 0.014893391735249588\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.2875816993464052,\n \"acc_stderr\": 0.02591780611714716,\n \"acc_norm\": 0.2875816993464052,\n \"acc_norm_stderr\": 0.02591780611714716\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.24437299035369775,\n \"acc_stderr\": 0.024406162094668882,\n \"acc_norm\": 0.24437299035369775,\n \"acc_norm_stderr\": 0.024406162094668882\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.2191358024691358,\n \"acc_stderr\": 0.023016705640262192,\n \"acc_norm\": 0.2191358024691358,\n \"acc_norm_stderr\": 0.023016705640262192\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.2375886524822695,\n \"acc_stderr\": 0.025389512552729906,\n \"acc_norm\": 0.2375886524822695,\n \"acc_norm_stderr\": 0.025389512552729906\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.23468057366362452,\n \"acc_stderr\": 0.010824026872449361,\n \"acc_norm\": 0.23468057366362452,\n \"acc_norm_stderr\": 0.010824026872449361\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.18382352941176472,\n \"acc_stderr\": 0.023529242185193106,\n \"acc_norm\": 0.18382352941176472,\n \"acc_norm_stderr\": 0.023529242185193106\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.2222222222222222,\n \"acc_stderr\": 0.016819028375736386,\n \"acc_norm\": 0.2222222222222222,\n \"acc_norm_stderr\": 0.016819028375736386\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.15454545454545454,\n \"acc_stderr\": 0.03462262571262667,\n \"acc_norm\": 0.15454545454545454,\n \"acc_norm_stderr\": 0.03462262571262667\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.2,\n \"acc_stderr\": 0.025607375986579153,\n \"acc_norm\": 0.2,\n \"acc_norm_stderr\": 0.025607375986579153\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.2935323383084577,\n \"acc_stderr\": 0.03220024104534205,\n \"acc_norm\": 0.2935323383084577,\n \"acc_norm_stderr\": 0.03220024104534205\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.25,\n \"acc_stderr\": 0.04351941398892446,\n \"acc_norm\": 0.25,\n \"acc_norm_stderr\": 0.04351941398892446\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.23493975903614459,\n \"acc_stderr\": 0.03300533186128922,\n \"acc_norm\": 0.23493975903614459,\n \"acc_norm_stderr\": 0.03300533186128922\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.17543859649122806,\n \"acc_stderr\": 0.029170885500727654,\n \"acc_norm\": 0.17543859649122806,\n \"acc_norm_stderr\": 0.029170885500727654\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.23745410036719705,\n \"mc1_stderr\": 0.014896277441041852,\n \"mc2\": 0.49031550463342244,\n \"mc2_stderr\": 0.01671675532033107\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.5074980268350434,\n \"acc_stderr\": 0.014050905521228568\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.001516300227445034,\n \"acc_stderr\": 0.0010717793485492642\n }\n}\n```", "repo_url": 
"https://huggingface.co/vihangd/neuralfalcon-1b-v1", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_12_17T03_31_54.267536", "path": ["**/details_harness|arc:challenge|25_2023-12-17T03-31-54.267536.parquet"]}, {"split": "2024_01_05T10_25_35.240095", "path": ["**/details_harness|arc:challenge|25_2024-01-05T10-25-35.240095.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-05T10-25-35.240095.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_12_17T03_31_54.267536", "path": ["**/details_harness|gsm8k|5_2023-12-17T03-31-54.267536.parquet"]}, {"split": "2024_01_05T10_25_35.240095", "path": ["**/details_harness|gsm8k|5_2024-01-05T10-25-35.240095.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-05T10-25-35.240095.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_12_17T03_31_54.267536", "path": ["**/details_harness|hellaswag|10_2023-12-17T03-31-54.267536.parquet"]}, {"split": "2024_01_05T10_25_35.240095", "path": ["**/details_harness|hellaswag|10_2024-01-05T10-25-35.240095.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-05T10-25-35.240095.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_12_17T03_31_54.267536", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-17T03-31-54.267536.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-12-17T03-31-54.267536.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-12-17T03-31-54.267536.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-12-17T03-31-54.267536.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-17T03-31-54.267536.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-12-17T03-31-54.267536.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-12-17T03-31-54.267536.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-12-17T03-31-54.267536.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-12-17T03-31-54.267536.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-12-17T03-31-54.267536.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-12-17T03-31-54.267536.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-12-17T03-31-54.267536.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-17T03-31-54.267536.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-12-17T03-31-54.267536.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-17T03-31-54.267536.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-17T03-31-54.267536.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-12-17T03-31-54.267536.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-12-17T03-31-54.267536.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-12-17T03-31-54.267536.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-17T03-31-54.267536.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-17T03-31-54.267536.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-17T03-31-54.267536.parquet", 
"**/details_harness|hendrycksTest-high_school_geography|5_2023-12-17T03-31-54.267536.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-17T03-31-54.267536.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-17T03-31-54.267536.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-17T03-31-54.267536.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-17T03-31-54.267536.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-12-17T03-31-54.267536.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-17T03-31-54.267536.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-17T03-31-54.267536.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-17T03-31-54.267536.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-17T03-31-54.267536.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-12-17T03-31-54.267536.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-12-17T03-31-54.267536.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-12-17T03-31-54.267536.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-12-17T03-31-54.267536.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-17T03-31-54.267536.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-12-17T03-31-54.267536.parquet", "**/details_harness|hendrycksTest-management|5_2023-12-17T03-31-54.267536.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-12-17T03-31-54.267536.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-12-17T03-31-54.267536.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-12-17T03-31-54.267536.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-12-17T03-31-54.267536.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-17T03-31-54.267536.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-12-17T03-31-54.267536.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-12-17T03-31-54.267536.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-12-17T03-31-54.267536.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-12-17T03-31-54.267536.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-12-17T03-31-54.267536.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-12-17T03-31-54.267536.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-12-17T03-31-54.267536.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-12-17T03-31-54.267536.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-12-17T03-31-54.267536.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-12-17T03-31-54.267536.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-17T03-31-54.267536.parquet", "**/details_harness|hendrycksTest-virology|5_2023-12-17T03-31-54.267536.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-12-17T03-31-54.267536.parquet"]}, {"split": "2024_01_05T10_25_35.240095", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-05T10-25-35.240095.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-05T10-25-35.240095.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-05T10-25-35.240095.parquet", 
"**/details_harness|hendrycksTest-business_ethics|5_2024-01-05T10-25-35.240095.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-05T10-25-35.240095.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-05T10-25-35.240095.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-05T10-25-35.240095.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-05T10-25-35.240095.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-05T10-25-35.240095.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-05T10-25-35.240095.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-05T10-25-35.240095.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-05T10-25-35.240095.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-05T10-25-35.240095.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-05T10-25-35.240095.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-05T10-25-35.240095.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-05T10-25-35.240095.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-05T10-25-35.240095.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-05T10-25-35.240095.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-05T10-25-35.240095.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-05T10-25-35.240095.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-05T10-25-35.240095.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-05T10-25-35.240095.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-05T10-25-35.240095.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-05T10-25-35.240095.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-05T10-25-35.240095.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-05T10-25-35.240095.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-05T10-25-35.240095.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-05T10-25-35.240095.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-05T10-25-35.240095.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-05T10-25-35.240095.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-05T10-25-35.240095.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-05T10-25-35.240095.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-05T10-25-35.240095.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-05T10-25-35.240095.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-05T10-25-35.240095.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-05T10-25-35.240095.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-05T10-25-35.240095.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-05T10-25-35.240095.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-05T10-25-35.240095.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-05T10-25-35.240095.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-05T10-25-35.240095.parquet", 
"**/details_harness|hendrycksTest-miscellaneous|5_2024-01-05T10-25-35.240095.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-05T10-25-35.240095.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-05T10-25-35.240095.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-05T10-25-35.240095.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-05T10-25-35.240095.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-05T10-25-35.240095.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-05T10-25-35.240095.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-05T10-25-35.240095.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-05T10-25-35.240095.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-05T10-25-35.240095.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-05T10-25-35.240095.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-05T10-25-35.240095.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-05T10-25-35.240095.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-05T10-25-35.240095.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-05T10-25-35.240095.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-05T10-25-35.240095.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-05T10-25-35.240095.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-05T10-25-35.240095.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-05T10-25-35.240095.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-05T10-25-35.240095.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-05T10-25-35.240095.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-05T10-25-35.240095.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-05T10-25-35.240095.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-05T10-25-35.240095.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-05T10-25-35.240095.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-05T10-25-35.240095.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-05T10-25-35.240095.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-05T10-25-35.240095.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-05T10-25-35.240095.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-05T10-25-35.240095.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-05T10-25-35.240095.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-05T10-25-35.240095.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-05T10-25-35.240095.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-05T10-25-35.240095.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-05T10-25-35.240095.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-05T10-25-35.240095.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-05T10-25-35.240095.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-05T10-25-35.240095.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-05T10-25-35.240095.parquet", 
"**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-05T10-25-35.240095.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-05T10-25-35.240095.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-05T10-25-35.240095.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-05T10-25-35.240095.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-05T10-25-35.240095.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-05T10-25-35.240095.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-05T10-25-35.240095.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-05T10-25-35.240095.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-05T10-25-35.240095.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-05T10-25-35.240095.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-05T10-25-35.240095.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-05T10-25-35.240095.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-05T10-25-35.240095.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-05T10-25-35.240095.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-05T10-25-35.240095.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-05T10-25-35.240095.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-05T10-25-35.240095.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-05T10-25-35.240095.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-05T10-25-35.240095.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-05T10-25-35.240095.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-05T10-25-35.240095.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-05T10-25-35.240095.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-05T10-25-35.240095.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-05T10-25-35.240095.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-05T10-25-35.240095.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-05T10-25-35.240095.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-05T10-25-35.240095.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-05T10-25-35.240095.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-05T10-25-35.240095.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-05T10-25-35.240095.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-05T10-25-35.240095.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-05T10-25-35.240095.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-05T10-25-35.240095.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-05T10-25-35.240095.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_12_17T03_31_54.267536", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-17T03-31-54.267536.parquet"]}, {"split": "2024_01_05T10_25_35.240095", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-05T10-25-35.240095.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-05T10-25-35.240095.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_12_17T03_31_54.267536", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-12-17T03-31-54.267536.parquet"]}, {"split": "2024_01_05T10_25_35.240095", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-05T10-25-35.240095.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-05T10-25-35.240095.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_12_17T03_31_54.267536", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-12-17T03-31-54.267536.parquet"]}, {"split": "2024_01_05T10_25_35.240095", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-05T10-25-35.240095.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-05T10-25-35.240095.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_12_17T03_31_54.267536", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-12-17T03-31-54.267536.parquet"]}, {"split": "2024_01_05T10_25_35.240095", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-05T10-25-35.240095.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-05T10-25-35.240095.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_12_17T03_31_54.267536", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-17T03-31-54.267536.parquet"]}, {"split": "2024_01_05T10_25_35.240095", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-05T10-25-35.240095.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-05T10-25-35.240095.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_12_17T03_31_54.267536", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-12-17T03-31-54.267536.parquet"]}, {"split": "2024_01_05T10_25_35.240095", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-05T10-25-35.240095.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-05T10-25-35.240095.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_12_17T03_31_54.267536", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-12-17T03-31-54.267536.parquet"]}, {"split": "2024_01_05T10_25_35.240095", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-05T10-25-35.240095.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-05T10-25-35.240095.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_12_17T03_31_54.267536", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-12-17T03-31-54.267536.parquet"]}, {"split": "2024_01_05T10_25_35.240095", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-05T10-25-35.240095.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-05T10-25-35.240095.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_12_17T03_31_54.267536", "path": 
["**/details_harness|hendrycksTest-college_mathematics|5_2023-12-17T03-31-54.267536.parquet"]}, {"split": "2024_01_05T10_25_35.240095", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-05T10-25-35.240095.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-05T10-25-35.240095.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_12_17T03_31_54.267536", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-12-17T03-31-54.267536.parquet"]}, {"split": "2024_01_05T10_25_35.240095", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-05T10-25-35.240095.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-05T10-25-35.240095.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_12_17T03_31_54.267536", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-12-17T03-31-54.267536.parquet"]}, {"split": "2024_01_05T10_25_35.240095", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-05T10-25-35.240095.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-05T10-25-35.240095.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_12_17T03_31_54.267536", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-12-17T03-31-54.267536.parquet"]}, {"split": "2024_01_05T10_25_35.240095", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-05T10-25-35.240095.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-05T10-25-35.240095.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_12_17T03_31_54.267536", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-17T03-31-54.267536.parquet"]}, {"split": "2024_01_05T10_25_35.240095", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-05T10-25-35.240095.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-05T10-25-35.240095.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_12_17T03_31_54.267536", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-12-17T03-31-54.267536.parquet"]}, {"split": "2024_01_05T10_25_35.240095", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-05T10-25-35.240095.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-05T10-25-35.240095.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_12_17T03_31_54.267536", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-17T03-31-54.267536.parquet"]}, {"split": "2024_01_05T10_25_35.240095", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-05T10-25-35.240095.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-05T10-25-35.240095.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_12_17T03_31_54.267536", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-17T03-31-54.267536.parquet"]}, {"split": "2024_01_05T10_25_35.240095", "path": 
["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-05T10-25-35.240095.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-05T10-25-35.240095.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_12_17T03_31_54.267536", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-12-17T03-31-54.267536.parquet"]}, {"split": "2024_01_05T10_25_35.240095", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-05T10-25-35.240095.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-05T10-25-35.240095.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_12_17T03_31_54.267536", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-12-17T03-31-54.267536.parquet"]}, {"split": "2024_01_05T10_25_35.240095", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-05T10-25-35.240095.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-05T10-25-35.240095.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_12_17T03_31_54.267536", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-12-17T03-31-54.267536.parquet"]}, {"split": "2024_01_05T10_25_35.240095", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-05T10-25-35.240095.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-05T10-25-35.240095.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_12_17T03_31_54.267536", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-17T03-31-54.267536.parquet"]}, {"split": "2024_01_05T10_25_35.240095", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-05T10-25-35.240095.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-05T10-25-35.240095.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_12_17T03_31_54.267536", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-17T03-31-54.267536.parquet"]}, {"split": "2024_01_05T10_25_35.240095", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-05T10-25-35.240095.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-05T10-25-35.240095.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_12_17T03_31_54.267536", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-17T03-31-54.267536.parquet"]}, {"split": "2024_01_05T10_25_35.240095", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-05T10-25-35.240095.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-05T10-25-35.240095.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_12_17T03_31_54.267536", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-12-17T03-31-54.267536.parquet"]}, {"split": "2024_01_05T10_25_35.240095", "path": 
["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-05T10-25-35.240095.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-05T10-25-35.240095.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_12_17T03_31_54.267536", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-17T03-31-54.267536.parquet"]}, {"split": "2024_01_05T10_25_35.240095", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-05T10-25-35.240095.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-05T10-25-35.240095.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_12_17T03_31_54.267536", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-17T03-31-54.267536.parquet"]}, {"split": "2024_01_05T10_25_35.240095", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-05T10-25-35.240095.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-05T10-25-35.240095.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_12_17T03_31_54.267536", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-17T03-31-54.267536.parquet"]}, {"split": "2024_01_05T10_25_35.240095", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-05T10-25-35.240095.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-05T10-25-35.240095.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_12_17T03_31_54.267536", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-17T03-31-54.267536.parquet"]}, {"split": "2024_01_05T10_25_35.240095", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-05T10-25-35.240095.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-05T10-25-35.240095.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_12_17T03_31_54.267536", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-12-17T03-31-54.267536.parquet"]}, {"split": "2024_01_05T10_25_35.240095", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-05T10-25-35.240095.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-05T10-25-35.240095.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_12_17T03_31_54.267536", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-17T03-31-54.267536.parquet"]}, {"split": "2024_01_05T10_25_35.240095", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-05T10-25-35.240095.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-05T10-25-35.240095.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_12_17T03_31_54.267536", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-17T03-31-54.267536.parquet"]}, 
{"split": "2024_01_05T10_25_35.240095", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-05T10-25-35.240095.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-05T10-25-35.240095.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_12_17T03_31_54.267536", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-17T03-31-54.267536.parquet"]}, {"split": "2024_01_05T10_25_35.240095", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-05T10-25-35.240095.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-05T10-25-35.240095.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_12_17T03_31_54.267536", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-17T03-31-54.267536.parquet"]}, {"split": "2024_01_05T10_25_35.240095", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-05T10-25-35.240095.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-05T10-25-35.240095.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_12_17T03_31_54.267536", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-12-17T03-31-54.267536.parquet"]}, {"split": "2024_01_05T10_25_35.240095", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-05T10-25-35.240095.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-05T10-25-35.240095.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_12_17T03_31_54.267536", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-12-17T03-31-54.267536.parquet"]}, {"split": "2024_01_05T10_25_35.240095", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-05T10-25-35.240095.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-05T10-25-35.240095.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_12_17T03_31_54.267536", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-12-17T03-31-54.267536.parquet"]}, {"split": "2024_01_05T10_25_35.240095", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-05T10-25-35.240095.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-05T10-25-35.240095.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_12_17T03_31_54.267536", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-12-17T03-31-54.267536.parquet"]}, {"split": "2024_01_05T10_25_35.240095", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-05T10-25-35.240095.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-05T10-25-35.240095.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_12_17T03_31_54.267536", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-17T03-31-54.267536.parquet"]}, {"split": "2024_01_05T10_25_35.240095", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-05T10-25-35.240095.parquet"]}, {"split": 
"latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-05T10-25-35.240095.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_12_17T03_31_54.267536", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-12-17T03-31-54.267536.parquet"]}, {"split": "2024_01_05T10_25_35.240095", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-05T10-25-35.240095.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-05T10-25-35.240095.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_12_17T03_31_54.267536", "path": ["**/details_harness|hendrycksTest-management|5_2023-12-17T03-31-54.267536.parquet"]}, {"split": "2024_01_05T10_25_35.240095", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-05T10-25-35.240095.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-05T10-25-35.240095.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_12_17T03_31_54.267536", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-12-17T03-31-54.267536.parquet"]}, {"split": "2024_01_05T10_25_35.240095", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-05T10-25-35.240095.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-05T10-25-35.240095.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_12_17T03_31_54.267536", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-12-17T03-31-54.267536.parquet"]}, {"split": "2024_01_05T10_25_35.240095", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-05T10-25-35.240095.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-05T10-25-35.240095.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_12_17T03_31_54.267536", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-12-17T03-31-54.267536.parquet"]}, {"split": "2024_01_05T10_25_35.240095", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-05T10-25-35.240095.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-05T10-25-35.240095.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_12_17T03_31_54.267536", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-12-17T03-31-54.267536.parquet"]}, {"split": "2024_01_05T10_25_35.240095", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-05T10-25-35.240095.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-05T10-25-35.240095.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_12_17T03_31_54.267536", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-17T03-31-54.267536.parquet"]}, {"split": "2024_01_05T10_25_35.240095", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-05T10-25-35.240095.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-05T10-25-35.240095.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_12_17T03_31_54.267536", "path": 
["**/details_harness|hendrycksTest-nutrition|5_2023-12-17T03-31-54.267536.parquet"]}, {"split": "2024_01_05T10_25_35.240095", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-05T10-25-35.240095.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-05T10-25-35.240095.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_12_17T03_31_54.267536", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-12-17T03-31-54.267536.parquet"]}, {"split": "2024_01_05T10_25_35.240095", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-05T10-25-35.240095.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-05T10-25-35.240095.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_12_17T03_31_54.267536", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-12-17T03-31-54.267536.parquet"]}, {"split": "2024_01_05T10_25_35.240095", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-05T10-25-35.240095.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-05T10-25-35.240095.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_12_17T03_31_54.267536", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-12-17T03-31-54.267536.parquet"]}, {"split": "2024_01_05T10_25_35.240095", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-05T10-25-35.240095.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-05T10-25-35.240095.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_12_17T03_31_54.267536", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-12-17T03-31-54.267536.parquet"]}, {"split": "2024_01_05T10_25_35.240095", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-05T10-25-35.240095.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-05T10-25-35.240095.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_12_17T03_31_54.267536", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-12-17T03-31-54.267536.parquet"]}, {"split": "2024_01_05T10_25_35.240095", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-05T10-25-35.240095.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-05T10-25-35.240095.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_12_17T03_31_54.267536", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-12-17T03-31-54.267536.parquet"]}, {"split": "2024_01_05T10_25_35.240095", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-05T10-25-35.240095.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-05T10-25-35.240095.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_12_17T03_31_54.267536", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-12-17T03-31-54.267536.parquet"]}, {"split": "2024_01_05T10_25_35.240095", "path": 
["**/details_harness|hendrycksTest-public_relations|5_2024-01-05T10-25-35.240095.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-05T10-25-35.240095.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_12_17T03_31_54.267536", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-12-17T03-31-54.267536.parquet"]}, {"split": "2024_01_05T10_25_35.240095", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-05T10-25-35.240095.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-05T10-25-35.240095.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_12_17T03_31_54.267536", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-12-17T03-31-54.267536.parquet"]}, {"split": "2024_01_05T10_25_35.240095", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-05T10-25-35.240095.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-05T10-25-35.240095.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_12_17T03_31_54.267536", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-17T03-31-54.267536.parquet"]}, {"split": "2024_01_05T10_25_35.240095", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-05T10-25-35.240095.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-05T10-25-35.240095.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_12_17T03_31_54.267536", "path": ["**/details_harness|hendrycksTest-virology|5_2023-12-17T03-31-54.267536.parquet"]}, {"split": "2024_01_05T10_25_35.240095", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-05T10-25-35.240095.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-05T10-25-35.240095.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_12_17T03_31_54.267536", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-12-17T03-31-54.267536.parquet"]}, {"split": "2024_01_05T10_25_35.240095", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-05T10-25-35.240095.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-05T10-25-35.240095.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_12_17T03_31_54.267536", "path": ["**/details_harness|truthfulqa:mc|0_2023-12-17T03-31-54.267536.parquet"]}, {"split": "2024_01_05T10_25_35.240095", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-05T10-25-35.240095.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-05T10-25-35.240095.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_12_17T03_31_54.267536", "path": ["**/details_harness|winogrande|5_2023-12-17T03-31-54.267536.parquet"]}, {"split": "2024_01_05T10_25_35.240095", "path": ["**/details_harness|winogrande|5_2024-01-05T10-25-35.240095.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-05T10-25-35.240095.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_12_17T03_31_54.267536", "path": ["results_2023-12-17T03-31-54.267536.parquet"]}, {"split": "2024_01_05T10_25_35.240095", "path": 
["results_2024-01-05T10-25-35.240095.parquet"]}, {"split": "latest", "path": ["results_2024-01-05T10-25-35.240095.parquet"]}]}]}
2024-01-05T10:27:40+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of vihangd/neuralfalcon-1b-v1 Dataset automatically created during the evaluation run of model vihangd/neuralfalcon-1b-v1 on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-05T10:25:35.240095 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
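The loading snippet referenced above was stripped from this copy of the card; the sketch below shows what it would typically look like. The repository id follows the leaderboard's `details_<org>__<model>` naming convention and is inferred here rather than quoted from the source.

```python
from datasets import load_dataset

# Repo id inferred from the Open LLM Leaderboard naming convention (assumption);
# "harness_winogrande_5" is one of the configurations listed in this card's metadata.
data = load_dataset(
    "open-llm-leaderboard/details_vihangd__neuralfalcon-1b-v1",
    "harness_winogrande_5",
    split="train",
)
print(data)
```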
[ "# Dataset Card for Evaluation run of vihangd/neuralfalcon-1b-v1\n\n\n\nDataset automatically created during the evaluation run of model vihangd/neuralfalcon-1b-v1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-05T10:25:35.240095(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of vihangd/neuralfalcon-1b-v1\n\n\n\nDataset automatically created during the evaluation run of model vihangd/neuralfalcon-1b-v1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-05T10:25:35.240095(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 187, 68, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of vihangd/neuralfalcon-1b-v1\n\n\n\nDataset automatically created during the evaluation run of model vihangd/neuralfalcon-1b-v1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2024-01-05T10:25:35.240095(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]## Dataset Card Authors [optional]## Dataset Card Contact" ]
b06b176bbcd46115f135f0ed9d7d2ce1c3eaf7eb
# Dataset of bubble (Arknights)

This is the dataset of bubble (Arknights), containing 15 images and their tags.

Images are crawled from many sites (e.g. danbooru, pixiv, zerochan ...); the auto-crawling system is powered by [DeepGHS Team](https://github.com/deepghs) ([huggingface organization](https://huggingface.co/deepghs)). This is a WebUI that contains crawlers and other things: ([LittleAppleWebUI](https://github.com/LittleApple-fp16/LittleAppleWebUI))

| Name            | Images | Download                                 | Description                                                                               |
|:----------------|-------:|:-----------------------------------------|:------------------------------------------------------------------------------------------|
| raw             |     15 | [Download](dataset-raw.zip)              | Raw data with meta information.                                                           |
| raw-stage3      |     35 | [Download](dataset-raw-stage3.zip)       | 3-stage cropped raw data with meta information.                                           |
| raw-stage3-eyes |     46 | [Download](dataset-raw-stage3-eyes.zip)  | 3-stage cropped (with eye-focus) raw data with meta information.                          |
| 384x512         |     15 | [Download](dataset-384x512.zip)          | 384x512 aligned dataset.                                                                  |
| 512x704         |     15 | [Download](dataset-512x704.zip)          | 512x704 aligned dataset.                                                                  |
| 640x880         |     15 | [Download](dataset-640x880.zip)          | 640x880 aligned dataset.                                                                  |
| stage3-640      |     35 | [Download](dataset-stage3-640.zip)       | 3-stage cropped dataset with the shorter side not exceeding 640 pixels.                   |
| stage3-800      |     35 | [Download](dataset-stage3-800.zip)       | 3-stage cropped dataset with the shorter side not exceeding 800 pixels.                   |
| stage3-p512-640 |     30 | [Download](dataset-stage3-p512-640.zip)  | 3-stage cropped dataset with the area not less than 512x512 pixels.                       |
| stage3-eyes-640 |     46 | [Download](dataset-stage3-eyes-640.zip)  | 3-stage cropped (with eye-focus) dataset with the shorter side not exceeding 640 pixels.  |
| stage3-eyes-800 |     46 | [Download](dataset-stage3-eyes-800.zip)  | 3-stage cropped (with eye-focus) dataset with the shorter side not exceeding 800 pixels.  |
AppleHarem/bubble_arknights
[ "task_categories:text-to-image", "size_categories:n<1K", "license:mit", "art", "not-for-all-audiences", "region:us" ]
2023-12-17T04:20:14+00:00
{"license": "mit", "size_categories": ["n<1K"], "task_categories": ["text-to-image"], "tags": ["art", "not-for-all-audiences"]}
2023-12-17T04:20:17+00:00
[]
[]
TAGS #task_categories-text-to-image #size_categories-n<1K #license-mit #art #not-for-all-audiences #region-us
Dataset of bubble (Arknights)
=============================

This is the dataset of bubble (Arknights), containing 15 images and their tags.

Images are crawled from many sites (e.g. danbooru, pixiv, zerochan ...); the auto-crawling system is powered by DeepGHS Team (huggingface organization). This is a WebUI that contains crawlers and other things: (LittleAppleWebUI)
[]
[ "TAGS\n#task_categories-text-to-image #size_categories-n<1K #license-mit #art #not-for-all-audiences #region-us \n" ]
[ 44 ]
[ "passage: TAGS\n#task_categories-text-to-image #size_categories-n<1K #license-mit #art #not-for-all-audiences #region-us \n" ]
0653cb56881488153eec46985a4535d500144f80
- original dataset: [glaiveai/glaive-function-calling-v2](https://huggingface.co/datasets/glaiveai/glaive-function-calling-v2) ``` {'system_message': 'You are a helpful assistant with access to the following functions. Use them if required -', 'function_description': '{\n "name": "get_random_quote",\n "description": "Get a random quote",\n "parameters": {}\n}', 'conversations': [{'content': 'Hi, can you help me with something?', 'role': 'user'}, {'content': "Of course! I'm here to assist you. What do you need help with?", 'role': 'assistant'}, {'content': 'I need to book a flight from New York to Paris. Can you do that for me?', 'role': 'user'}, {'content': "I'm sorry, but as an AI, I don't have the capability to book flights or perform external tasks. My current function allows me to provide you with a random quote. Would you like to hear a quote instead?", 'role': 'assistant'}, {'content': 'Oh, I see. Sure, I would love to hear a random quote.', 'role': 'user'}, {'content': '{"name": "get_random_quote", "arguments": {}}', 'role': 'function-call'}, {'content': '{"quote": "The only way to do great work is to love what you do. - Steve Jobs"}', 'role': 'function-response'}, {'content': 'Here is a quote for you: "The only way to do great work is to love what you do." - Steve Jobs. I hope it inspires you!', 'role': 'assistant'}]} ```
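For reference, a minimal sketch of how this formatted split could be loaded and inspected with the `datasets` library; the field names match the dataset features listed in the card metadata:

```python
from datasets import load_dataset

# Load the train split and inspect one record's conversation turns.
ds = load_dataset("heegyu/glaive-function-calling-v2-formatted", split="train")
example = ds[0]

print(example["system_message"])
print(example["function_description"])
for turn in example["conversations"]:
    print(f"{turn['role']}: {turn['content'][:80]}")
```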
heegyu/glaive-function-calling-v2-formatted
[ "region:us" ]
2023-12-17T04:48:09+00:00
{"configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}], "dataset_info": {"features": [{"name": "system_message", "dtype": "string"}, {"name": "function_description", "dtype": "string"}, {"name": "conversations", "list": [{"name": "content", "dtype": "string"}, {"name": "role", "dtype": "string"}]}], "splits": [{"name": "train", "num_bytes": 250214164, "num_examples": 112960}], "download_size": 93753668, "dataset_size": 250214164}}
2023-12-17T04:51:14+00:00
[]
[]
TAGS #region-us
- original dataset: glaiveai/glaive-function-calling-v2
[]
[ "TAGS\n#region-us \n" ]
[ 6 ]
[ "passage: TAGS\n#region-us \n" ]
6e96d52b3593ed2ea27fa69002ca603804f5b2e7
Attribution: Wikipedia.org
styletts2-community/multilingual-pl-bert
[ "language:af", "language:an", "language:ar", "language:az", "language:ba", "language:be", "language:bg", "language:bn", "language:bpy", "language:bs", "language:ca", "language:cs", "language:cy", "language:da", "language:de", "language:el", "language:es", "language:et", "language:eu", "language:fi", "language:fr", "language:gu", "language:hak", "language:he", "language:hi", "language:hr", "language:hu", "language:hy", "language:hyw", "language:id", "language:io", "language:is", "language:it", "language:ja", "language:ka", "language:kk", "language:kn", "language:ko", "language:la", "language:lb", "language:lt", "language:lv", "language:mk", "language:ml", "language:mr", "language:ms", "language:ne", "language:nl", "language:no", "language:pa", "language:pl", "language:pt", "language:ro", "language:ru", "language:sk", "language:sl", "language:sq", "language:sr", "language:sv", "language:sw", "language:ta", "language:te", "language:th", "language:tr", "language:tt", "language:ur", "language:uz", "language:vi", "language:zh", "license:cc-by-4.0", "region:us" ]
2023-12-17T05:03:40+00:00
{"language": ["af", "an", "ar", "az", "ba", "be", "bg", "bn", "bpy", "bs", "ca", "cs", "cy", "da", "de", "el", "es", "et", "eu", "fi", "fr", "gu", "hak", "he", "hi", "hr", "hu", "hy", "hyw", "id", "io", "is", "it", "ja", "ka", "kk", "kn", "ko", "la", "lb", "lt", "lv", "mk", "ml", "mr", "ms", "ne", "nl", false, "pa", "pl", "pt", "ro", "ru", "sk", "sl", "sq", "sr", "sv", "sw", "ta", "te", "th", "tr", "tt", "ur", "uz", "vi", "zh"], "license": "cc-by-4.0"}
2024-01-08T04:30:44+00:00
[]
[ "af", "an", "ar", "az", "ba", "be", "bg", "bn", "bpy", "bs", "ca", "cs", "cy", "da", "de", "el", "es", "et", "eu", "fi", "fr", "gu", "hak", "he", "hi", "hr", "hu", "hy", "hyw", "id", "io", "is", "it", "ja", "ka", "kk", "kn", "ko", "la", "lb", "lt", "lv", "mk", "ml", "mr", "ms", "ne", "nl", "no", "pa", "pl", "pt", "ro", "ru", "sk", "sl", "sq", "sr", "sv", "sw", "ta", "te", "th", "tr", "tt", "ur", "uz", "vi", "zh" ]
TAGS #language-Afrikaans #language-Aragonese #language-Arabic #language-Azerbaijani #language-Bashkir #language-Belarusian #language-Bulgarian #language-Bengali #language-Bishnupriya #language-Bosnian #language-Catalan #language-Czech #language-Welsh #language-Danish #language-German #language-Modern Greek (1453-) #language-Spanish #language-Estonian #language-Basque #language-Finnish #language-French #language-Gujarati #language-Hakka Chinese #language-Hebrew #language-Hindi #language-Croatian #language-Hungarian #language-Armenian #language-Western Armenian #language-Indonesian #language-Ido #language-Icelandic #language-Italian #language-Japanese #language-Georgian #language-Kazakh #language-Kannada #language-Korean #language-Latin #language-Luxembourgish #language-Lithuanian #language-Latvian #language-Macedonian #language-Malayalam #language-Marathi #language-Malay (macrolanguage) #language-Nepali (macrolanguage) #language-Dutch #language-Norwegian #language-Panjabi #language-Polish #language-Portuguese #language-Romanian #language-Russian #language-Slovak #language-Slovenian #language-Albanian #language-Serbian #language-Swedish #language-Swahili (macrolanguage) #language-Tamil #language-Telugu #language-Thai #language-Turkish #language-Tatar #language-Urdu #language-Uzbek #language-Vietnamese #language-Chinese #license-cc-by-4.0 #region-us
Attribution: URL
[]
[ "TAGS\n#language-Afrikaans #language-Aragonese #language-Arabic #language-Azerbaijani #language-Bashkir #language-Belarusian #language-Bulgarian #language-Bengali #language-Bishnupriya #language-Bosnian #language-Catalan #language-Czech #language-Welsh #language-Danish #language-German #language-Modern Greek (1453-) #language-Spanish #language-Estonian #language-Basque #language-Finnish #language-French #language-Gujarati #language-Hakka Chinese #language-Hebrew #language-Hindi #language-Croatian #language-Hungarian #language-Armenian #language-Western Armenian #language-Indonesian #language-Ido #language-Icelandic #language-Italian #language-Japanese #language-Georgian #language-Kazakh #language-Kannada #language-Korean #language-Latin #language-Luxembourgish #language-Lithuanian #language-Latvian #language-Macedonian #language-Malayalam #language-Marathi #language-Malay (macrolanguage) #language-Nepali (macrolanguage) #language-Dutch #language-Norwegian #language-Panjabi #language-Polish #language-Portuguese #language-Romanian #language-Russian #language-Slovak #language-Slovenian #language-Albanian #language-Serbian #language-Swedish #language-Swahili (macrolanguage) #language-Tamil #language-Telugu #language-Thai #language-Turkish #language-Tatar #language-Urdu #language-Uzbek #language-Vietnamese #language-Chinese #license-cc-by-4.0 #region-us \n" ]
[ 422 ]
[ "passage: TAGS\n#language-Afrikaans #language-Aragonese #language-Arabic #language-Azerbaijani #language-Bashkir #language-Belarusian #language-Bulgarian #language-Bengali #language-Bishnupriya #language-Bosnian #language-Catalan #language-Czech #language-Welsh #language-Danish #language-German #language-Modern Greek (1453-) #language-Spanish #language-Estonian #language-Basque #language-Finnish #language-French #language-Gujarati #language-Hakka Chinese #language-Hebrew #language-Hindi #language-Croatian #language-Hungarian #language-Armenian #language-Western Armenian #language-Indonesian #language-Ido #language-Icelandic #language-Italian #language-Japanese #language-Georgian #language-Kazakh #language-Kannada #language-Korean #language-Latin #language-Luxembourgish #language-Lithuanian #language-Latvian #language-Macedonian #language-Malayalam #language-Marathi #language-Malay (macrolanguage) #language-Nepali (macrolanguage) #language-Dutch #language-Norwegian #language-Panjabi #language-Polish #language-Portuguese #language-Romanian #language-Russian #language-Slovak #language-Slovenian #language-Albanian #language-Serbian #language-Swedish #language-Swahili (macrolanguage) #language-Tamil #language-Telugu #language-Thai #language-Turkish #language-Tatar #language-Urdu #language-Uzbek #language-Vietnamese #language-Chinese #license-cc-by-4.0 #region-us \n" ]
a46d18f88b27cff221eece3d91b9866443671b50
# Dataset of jackie (Arknights)

This is the dataset of jackie (Arknights), containing 26 images and their tags.

Images are crawled from many sites (e.g. danbooru, pixiv, zerochan ...); the auto-crawling system is powered by [DeepGHS Team](https://github.com/deepghs) ([huggingface organization](https://huggingface.co/deepghs)). This is a WebUI that contains crawlers and other things: ([LittleAppleWebUI](https://github.com/LittleApple-fp16/LittleAppleWebUI))

| Name            | Images | Download                                 | Description                                                                               |
|:----------------|-------:|:-----------------------------------------|:------------------------------------------------------------------------------------------|
| raw             |     26 | [Download](dataset-raw.zip)              | Raw data with meta information.                                                           |
| raw-stage3      |     70 | [Download](dataset-raw-stage3.zip)       | 3-stage cropped raw data with meta information.                                           |
| raw-stage3-eyes |     75 | [Download](dataset-raw-stage3-eyes.zip)  | 3-stage cropped (with eye-focus) raw data with meta information.                          |
| 384x512         |     26 | [Download](dataset-384x512.zip)          | 384x512 aligned dataset.                                                                  |
| 512x704         |     26 | [Download](dataset-512x704.zip)          | 512x704 aligned dataset.                                                                  |
| 640x880         |     26 | [Download](dataset-640x880.zip)          | 640x880 aligned dataset.                                                                  |
| stage3-640      |     70 | [Download](dataset-stage3-640.zip)       | 3-stage cropped dataset with the shorter side not exceeding 640 pixels.                   |
| stage3-800      |     70 | [Download](dataset-stage3-800.zip)       | 3-stage cropped dataset with the shorter side not exceeding 800 pixels.                   |
| stage3-p512-640 |     59 | [Download](dataset-stage3-p512-640.zip)  | 3-stage cropped dataset with the area not less than 512x512 pixels.                       |
| stage3-eyes-640 |     75 | [Download](dataset-stage3-eyes-640.zip)  | 3-stage cropped (with eye-focus) dataset with the shorter side not exceeding 640 pixels.  |
| stage3-eyes-800 |     75 | [Download](dataset-stage3-eyes-800.zip)  | 3-stage cropped (with eye-focus) dataset with the shorter side not exceeding 800 pixels.  |
AppleHarem/jackie_arknights
[ "task_categories:text-to-image", "size_categories:n<1K", "license:mit", "art", "not-for-all-audiences", "region:us" ]
2023-12-17T05:16:44+00:00
{"license": "mit", "size_categories": ["n<1K"], "task_categories": ["text-to-image"], "tags": ["art", "not-for-all-audiences"]}
2023-12-17T05:16:48+00:00
[]
[]
TAGS #task_categories-text-to-image #size_categories-n<1K #license-mit #art #not-for-all-audiences #region-us
Dataset of jackie (Arknights)
=============================

This is the dataset of jackie (Arknights), containing 26 images and their tags.

Images are crawled from many sites (e.g. danbooru, pixiv, zerochan ...); the auto-crawling system is powered by DeepGHS Team (huggingface organization). This is a WebUI that contains crawlers and other things: (LittleAppleWebUI)
[]
[ "TAGS\n#task_categories-text-to-image #size_categories-n<1K #license-mit #art #not-for-all-audiences #region-us \n" ]
[ 44 ]
[ "passage: TAGS\n#task_categories-text-to-image #size_categories-n<1K #license-mit #art #not-for-all-audiences #region-us \n" ]
2daf3bd608af7ea99e4212d2922c81d7f1d45c3e
# 100M Text Debiased Subset from LAION 2B - Captions in LAION-2B have a significant bias towards describing visual text content embedded in the images. - Released CLIP models have strong text spotting bias in almost every style of web images, resulting in the CLIP-filtering datasets inherently biased towards visual text dominant data. - CLIP models easily learn text spotting capacity from parrot captions while failing to connect the vision-language semantics, just like a text spotting parrot. For more details please see our [paper](https://arxiv.org/abs/2312.14232). ## Filtering Details We provide an alternative solution by releasing a less biased filtered LAION-2B 100M(107,166,507) subset. We construct a less biased 100M subset from the LAION-2B subset with Empty OCR results, CLIP score > 0.3, and Aesthetics score > 4.5. We add the ase_scores and K-means labels (4000 total) for each image-text pair. *We also released the dataset on [OpenDataLab](https://openxlab.org.cn/datasets/opendatalab-linyiqi/LAION-text-debiased-100M).* The pre-trained CLIP model is released on [github](https://github.com/opendatalab/CLIP-Parrot-Bias). ## Reference ``` @article{lin2023parrot, title={Parrot Captions Teach CLIP to Spot Text}, author={Yiqi Lin and Conghui He and Alex Jinpeng Wang and Bin Wang and Weijia Li and Mike Zheng Shou}, journal={arXiv preprint arXiv:2312.14232}, year={2023} } @misc{conghui2022opendatalab, author={He, Conghui and Li, Wei and Jin, Zhenjiang and Wang, Bin and Xu, Chao and Lin, Dahua}, title={OpenDataLab: Empowering General Artificial Intelligence with Open Datasets}, howpublished = {\url{https://opendatalab.com}}, year={2022} } ```
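To make the stated thresholds concrete, here is a hypothetical sketch of the filtering rule applied to a LAION-2B metadata shard; the column names (`ocr_text`, `clip_score`, `aesthetic_score`) and the shard filename are assumptions for illustration, not the released parquet schema:

```python
import pandas as pd

# Hypothetical column names; the released metadata schema may differ.
meta = pd.read_parquet("laion2b_metadata_shard.parquet")

subset = meta[
    (meta["ocr_text"].fillna("") == "")   # keep pairs with empty OCR results
    & (meta["clip_score"] > 0.3)          # CLIP similarity threshold
    & (meta["aesthetic_score"] > 4.5)     # aesthetics threshold
]
subset.to_parquet("laion_text_debiased_shard.parquet")
```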
linyq/laion_text_debiased_100M
[ "license:cc-by-4.0", "arxiv:2312.14232", "region:us" ]
2023-12-17T05:18:50+00:00
{"license": "cc-by-4.0"}
2023-12-25T01:46:27+00:00
[ "2312.14232" ]
[]
TAGS #license-cc-by-4.0 #arxiv-2312.14232 #region-us
# 100M Text Debiased Subset from LAION 2B - Captions in LAION-2B have a significant bias towards describing visual text content embedded in the images. - Released CLIP models have strong text spotting bias in almost every style of web images, resulting in the CLIP-filtering datasets inherently biased towards visual text dominant data. - CLIP models easily learn text spotting capacity from parrot captions while failing to connect the vision-language semantics, just like a text spotting parrot. For more details please see our paper. ## Filtering Details We provide an alternative solution by releasing a less biased filtered LAION-2B 100M(107,166,507) subset. We construct a less biased 100M subset from the LAION-2B subset with Empty OCR results, CLIP score > 0.3, and Aesthetics score > 4.5. We add the ase_scores and K-means labels (4000 total) for each image-text pair. *We also released the dataset on OpenDataLab.* The pre-trained CLIP model is released on github. ## Reference
[ "# 100M Text Debiased Subset from LAION 2B\n\n- Captions in LAION-2B have a significant bias towards describing visual text content embedded in the images.\n- Released CLIP models have strong text spotting bias in almost every style of web images, resulting in the CLIP-filtering datasets inherently biased towards visual text dominant data.\n- CLIP models easily learn text spotting capacity from parrot captions while failing to connect the vision-language semantics, just like a text spotting parrot.\n\nFor more details please see our paper.", "## Filtering Details\n\nWe provide an alternative solution by releasing a less biased filtered LAION-2B 100M(107,166,507) subset.\n\nWe construct a less biased 100M subset from the LAION-2B subset with Empty OCR results, CLIP score > 0.3, and Aesthetics score > 4.5.\n\nWe add the ase_scores and K-means labels (4000 total) for each image-text pair.\n\n*We also released the dataset on OpenDataLab.*\n\nThe pre-trained CLIP model is released on github.", "## Reference" ]
[ "TAGS\n#license-cc-by-4.0 #arxiv-2312.14232 #region-us \n", "# 100M Text Debiased Subset from LAION 2B\n\n- Captions in LAION-2B have a significant bias towards describing visual text content embedded in the images.\n- Released CLIP models have strong text spotting bias in almost every style of web images, resulting in the CLIP-filtering datasets inherently biased towards visual text dominant data.\n- CLIP models easily learn text spotting capacity from parrot captions while failing to connect the vision-language semantics, just like a text spotting parrot.\n\nFor more details please see our paper.", "## Filtering Details\n\nWe provide an alternative solution by releasing a less biased filtered LAION-2B 100M(107,166,507) subset.\n\nWe construct a less biased 100M subset from the LAION-2B subset with Empty OCR results, CLIP score > 0.3, and Aesthetics score > 4.5.\n\nWe add the ase_scores and K-means labels (4000 total) for each image-text pair.\n\n*We also released the dataset on OpenDataLab.*\n\nThe pre-trained CLIP model is released on github.", "## Reference" ]
[ 24, 128, 128, 2 ]
[ "passage: TAGS\n#license-cc-by-4.0 #arxiv-2312.14232 #region-us \n# 100M Text Debiased Subset from LAION 2B\n\n- Captions in LAION-2B have a significant bias towards describing visual text content embedded in the images.\n- Released CLIP models have strong text spotting bias in almost every style of web images, resulting in the CLIP-filtering datasets inherently biased towards visual text dominant data.\n- CLIP models easily learn text spotting capacity from parrot captions while failing to connect the vision-language semantics, just like a text spotting parrot.\n\nFor more details please see our paper.## Filtering Details\n\nWe provide an alternative solution by releasing a less biased filtered LAION-2B 100M(107,166,507) subset.\n\nWe construct a less biased 100M subset from the LAION-2B subset with Empty OCR results, CLIP score > 0.3, and Aesthetics score > 4.5.\n\nWe add the ase_scores and K-means labels (4000 total) for each image-text pair.\n\n*We also released the dataset on OpenDataLab.*\n\nThe pre-trained CLIP model is released on github.## Reference" ]
34c47447b28310dafc5a717d1814e9628d1c871c
Dataset designed to PEFT fine-tune Mistral 7B. A fine-tuned and quantized model trained on this dataset can be found at netcat420/MHENNlit
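A minimal sketch of how a PEFT (LoRA) fine-tune of Mistral 7B on this dataset might be set up with `transformers` and `peft`; the base checkpoint, the assumption that the repo loads with a default `train` split, and every hyperparameter below are illustrative, not taken from this card.

```python
from datasets import load_dataset
from transformers import AutoModelForCausalLM, AutoTokenizer
from peft import LoraConfig, get_peft_model

# Assumed: the dataset exposes a default train split; its column names are not documented here.
data = load_dataset("netcat420/quiklit", split="train")

base = "mistralai/Mistral-7B-v0.1"  # assumed base checkpoint
tokenizer = AutoTokenizer.from_pretrained(base)
model = AutoModelForCausalLM.from_pretrained(base)

# Illustrative LoRA configuration for parameter-efficient fine-tuning.
lora = LoraConfig(
    r=16,
    lora_alpha=32,
    lora_dropout=0.05,
    target_modules=["q_proj", "v_proj"],
    task_type="CAUSAL_LM",
)
model = get_peft_model(model, lora)
model.print_trainable_parameters()
# The training loop itself (e.g. transformers.Trainer or trl's SFTTrainer) is omitted.
```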
netcat420/quiklit
[ "license:mit", "region:us" ]
2023-12-17T05:29:04+00:00
{"license": "mit"}
2023-12-20T03:18:17+00:00
[]
[]
TAGS #license-mit #region-us
Dataset designed to PEFT fine-tune Mistral 7B. A fine-tuned and quantized model trained on this dataset can be found at netcat420/MHENNlit
[]
[ "TAGS\n#license-mit #region-us \n" ]
[ 11 ]
[ "passage: TAGS\n#license-mit #region-us \n" ]
6249642661146608e1de74de512efd9fd1fac0d2
## Noisy standard malay translation Original dataset from https://huggingface.co/collections/mesolitica/malaysian-noisy-translation-657e5f88e6759943575a91ac
mesolitica/noisy-standard-malay-translation-instructions
[ "language:ms", "region:us" ]
2023-12-17T05:52:43+00:00
{"language": ["ms"]}
2023-12-17T07:56:33+00:00
[]
[ "ms" ]
TAGS #language-Malay (macrolanguage) #region-us
## Noisy standard malay translation Original dataset from URL
[ "## Noisy standard malay translation\n\nOriginal dataset from URL" ]
[ "TAGS\n#language-Malay (macrolanguage) #region-us \n", "## Noisy standard malay translation\n\nOriginal dataset from URL" ]
[ 16, 13 ]
[ "passage: TAGS\n#language-Malay (macrolanguage) #region-us \n## Noisy standard malay translation\n\nOriginal dataset from URL" ]
6d1d08c026a329320c252db65db97bec91e6eeb9
# This is the DatasetHub for the recommender database - You can use this for applying a recommender system
rmit-denominator/recommender-data
[ "task_categories:feature-extraction", "language:aa", "license:mit", "biology", "region:us" ]
2023-12-17T06:06:55+00:00
{"language": ["aa"], "license": "mit", "task_categories": ["feature-extraction"], "pretty_name": "Feature_extractor-dataset", "tags": ["biology"]}
2023-12-17T06:13:40+00:00
[]
[ "aa" ]
TAGS #task_categories-feature-extraction #language-Afar #license-mit #biology #region-us
# This is the DatasetHub for the recommender database - You can use this for applying a recommender system
[ "# This is the DatasetHub for the recommender database\n- You can use this for applying recommended system" ]
[ "TAGS\n#task_categories-feature-extraction #language-Afar #license-mit #biology #region-us \n", "# This is the DatasetHub for the recommender database\n- You can use this for applying recommended system" ]
[ 31, 22 ]
[ "passage: TAGS\n#task_categories-feature-extraction #language-Afar #license-mit #biology #region-us \n# This is the DatasetHub for the recommender database\n- You can use this for applying recommended system" ]
ab759c47b710d504153b275685167307e98a4a4c
# MATH Dataset Merged This dataset is created from [EleutherAI/hendrycks_math](https://huggingface.co/datasets/EleutherAI/hendrycks_math) using this script ```python import datasets from datasets import DatasetDict # dataset = datasets.load_dataset("EleutherAI/hendrycks_math") configs = ['algebra', 'counting_and_probability', 'geometry', 'intermediate_algebra', 'number_theory', 'prealgebra', 'precalculus'] all_train_datasets = [] all_test_datasets = [] for c in configs: c_dataset = datasets.load_dataset("EleutherAI/hendrycks_math", c) all_train_datasets.append(c_dataset['train']) all_test_datasets.append(c_dataset['test']) # merge all datasets merged_train_dataset = datasets.concatenate_datasets(all_train_datasets) merged_test_dataset = datasets.concatenate_datasets(all_test_datasets) merged_dataset = DatasetDict({'train': merged_train_dataset, 'test': merged_test_dataset}) merged_dataset ```
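The merged result can also be loaded directly from the Hub instead of re-running the merge, for example:

```python
from datasets import load_dataset

# DatasetDict with a 'train' split (7,500 problems) and a 'test' split (5,000 problems),
# each row carrying 'problem', 'level', 'type' and 'solution' fields.
math_merged = load_dataset("JeremiahZ/hendrycks_math_merged")
print(math_merged)
print(math_merged["train"][0]["problem"])
```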
JeremiahZ/hendrycks_math_merged
[ "region:us" ]
2023-12-17T07:13:48+00:00
{"dataset_info": {"features": [{"name": "problem", "dtype": "string"}, {"name": "level", "dtype": "string"}, {"name": "type", "dtype": "string"}, {"name": "solution", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 5984772, "num_examples": 7500}, {"name": "test", "num_bytes": 3732833, "num_examples": 5000}], "download_size": 4848009, "dataset_size": 9717605}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "test", "path": "data/test-*"}]}]}
2023-12-17T07:20:03+00:00
[]
[]
TAGS #region-us
# MATH Dataset Merged This dataset is created from EleutherAI/hendrycks_math using this script
[ "# MATH Dataset Merged\nThis dataset is created from EleutherAI/hendrycks_math using this script" ]
[ "TAGS\n#region-us \n", "# MATH Dataset Merged\nThis dataset is created from EleutherAI/hendrycks_math using this script" ]
[ 6, 26 ]
[ "passage: TAGS\n#region-us \n# MATH Dataset Merged\nThis dataset is created from EleutherAI/hendrycks_math using this script" ]
ae97ba62fdcb679634a763625a0092707cb1db20
safe_new: 512x512 ty[p:typo image]
ScottHan/safe
[ "region:us" ]
2023-12-17T08:00:13+00:00
{}
2023-12-17T08:20:20+00:00
[]
[]
TAGS #region-us
safe_new: 512x512 ty[p:typo image]
[]
[ "TAGS\n#region-us \n" ]
[ 6 ]
[ "passage: TAGS\n#region-us \n" ]
f020af72a28e6b9476198b6ce87b61285e75e265
# Dataset of frost (Arknights) This is the dataset of frost (Arknights), containing 74 images and their tags. Images are crawled from many sites (e.g. danbooru, pixiv, zerochan ...), and the auto-crawling system is powered by [DeepGHS Team](https://github.com/deepghs) ([huggingface organization](https://huggingface.co/deepghs)). This is a WebUI that contains crawlers and other things: ([LittleAppleWebUI](https://github.com/LittleApple-fp16/LittleAppleWebUI)) | Name | Images | Download | Description | |:----------------|---------:|:----------------------------------------|:-----------------------------------------------------------------------------------------| | raw | 74 | [Download](dataset-raw.zip) | Raw data with meta information. | | raw-stage3 | 182 | [Download](dataset-raw-stage3.zip) | 3-stage cropped raw data with meta information. | | raw-stage3-eyes | 200 | [Download](dataset-raw-stage3-eyes.zip) | 3-stage cropped (with eye-focus) raw data with meta information. | | 384x512 | 74 | [Download](dataset-384x512.zip) | 384x512 aligned dataset. | | 512x704 | 74 | [Download](dataset-512x704.zip) | 512x704 aligned dataset. | | 640x880 | 74 | [Download](dataset-640x880.zip) | 640x880 aligned dataset. | | stage3-640 | 182 | [Download](dataset-stage3-640.zip) | 3-stage cropped dataset with the shorter side not exceeding 640 pixels. | | stage3-800 | 182 | [Download](dataset-stage3-800.zip) | 3-stage cropped dataset with the shorter side not exceeding 800 pixels. | | stage3-p512-640 | 142 | [Download](dataset-stage3-p512-640.zip) | 3-stage cropped dataset with the area not less than 512x512 pixels. | | stage3-eyes-640 | 200 | [Download](dataset-stage3-eyes-640.zip) | 3-stage cropped (with eye-focus) dataset with the shorter side not exceeding 640 pixels. | | stage3-eyes-800 | 200 | [Download](dataset-stage3-eyes-800.zip) | 3-stage cropped (with eye-focus) dataset with the shorter side not exceeding 800 pixels. |
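One way to fetch a single archive from the table above, assuming the zip files sit at the root of this dataset repo as the relative download links suggest (a sketch using `huggingface_hub`):

```python
from huggingface_hub import hf_hub_download
import zipfile

# Download the 384x512 aligned archive; swap the filename for any other row of the table.
path = hf_hub_download(
    repo_id="AppleHarem/frost_arknights",
    filename="dataset-384x512.zip",
    repo_type="dataset",
)
with zipfile.ZipFile(path) as zf:
    zf.extractall("frost_arknights_384x512")
```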
AppleHarem/frost_arknights
[ "task_categories:text-to-image", "size_categories:n<1K", "license:mit", "art", "not-for-all-audiences", "region:us" ]
2023-12-17T08:59:23+00:00
{"license": "mit", "size_categories": ["n<1K"], "task_categories": ["text-to-image"], "tags": ["art", "not-for-all-audiences"]}
2023-12-17T08:59:29+00:00
[]
[]
TAGS #task_categories-text-to-image #size_categories-n<1K #license-mit #art #not-for-all-audiences #region-us
Dataset of frost (Arknights) ============================ This is the dataset of frost (Arknights), containing 74 images and their tags. Images are crawled from many sites (e.g. danbooru, pixiv, zerochan ...), and the auto-crawling system is powered by DeepGHS Team (huggingface organization). This is a WebUI that contains crawlers and other things: (LittleAppleWebUI)
[]
[ "TAGS\n#task_categories-text-to-image #size_categories-n<1K #license-mit #art #not-for-all-audiences #region-us \n" ]
[ 44 ]
[ "passage: TAGS\n#task_categories-text-to-image #size_categories-n<1K #license-mit #art #not-for-all-audiences #region-us \n" ]
b6fa0dc8e4a27c23f8e58008a4355c11ebda14f0
# Dataset Card for "Boundary_detection_twomask" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
vigneshgs7/Boundary_detection_twomask
[ "region:us" ]
2023-12-17T09:31:26+00:00
{"dataset_info": {"features": [{"name": "pixel_values", "dtype": "image"}, {"name": "label", "dtype": "image"}], "splits": [{"name": "train", "num_bytes": 1342892808.0, "num_examples": 27}], "download_size": 88157922, "dataset_size": 1342892808.0}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]}
2023-12-17T09:31:59+00:00
[]
[]
TAGS #region-us
# Dataset Card for "Boundary_detection_twomask" More Information needed
[ "# Dataset Card for \"Boundary_detection_twomask\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"Boundary_detection_twomask\"\n\nMore Information needed" ]
[ 6, 20 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"Boundary_detection_twomask\"\n\nMore Information needed" ]
3dcb0d3d570bc04f074bb1da1f8564cd7519e5a7
# Dataset Card for Dataset Name <!-- Provide a quick summary of the dataset. --> ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. 
## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
RahulSundar/bhoomi-nestham-feedback
[ "region:us" ]
2023-12-17T09:43:07+00:00
{"configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data.csv"}]}]}
2023-12-17T09:43:08+00:00
[]
[]
TAGS #region-us
# Dataset Card for Dataset Name ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
[ "# Dataset Card for Dataset Name", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Dataset Name", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 8, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Dataset Name## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]## Dataset Card Authors [optional]## Dataset Card Contact" ]
4aab88cf05bb01ddc4d2b8fd200005cb8a36975c
# Dataset of pudding (Arknights) This is the dataset of pudding (Arknights), containing 20 images and their tags. Images are crawled from many sites (e.g. danbooru, pixiv, zerochan ...), and the auto-crawling system is powered by [DeepGHS Team](https://github.com/deepghs) ([huggingface organization](https://huggingface.co/deepghs)). This is a WebUI that contains crawlers and other things: ([LittleAppleWebUI](https://github.com/LittleApple-fp16/LittleAppleWebUI)) | Name | Images | Download | Description | |:----------------|---------:|:----------------------------------------|:-----------------------------------------------------------------------------------------| | raw | 20 | [Download](dataset-raw.zip) | Raw data with meta information. | | raw-stage3 | 52 | [Download](dataset-raw-stage3.zip) | 3-stage cropped raw data with meta information. | | raw-stage3-eyes | 59 | [Download](dataset-raw-stage3-eyes.zip) | 3-stage cropped (with eye-focus) raw data with meta information. | | 384x512 | 20 | [Download](dataset-384x512.zip) | 384x512 aligned dataset. | | 512x704 | 20 | [Download](dataset-512x704.zip) | 512x704 aligned dataset. | | 640x880 | 20 | [Download](dataset-640x880.zip) | 640x880 aligned dataset. | | stage3-640 | 52 | [Download](dataset-stage3-640.zip) | 3-stage cropped dataset with the shorter side not exceeding 640 pixels. | | stage3-800 | 52 | [Download](dataset-stage3-800.zip) | 3-stage cropped dataset with the shorter side not exceeding 800 pixels. | | stage3-p512-640 | 48 | [Download](dataset-stage3-p512-640.zip) | 3-stage cropped dataset with the area not less than 512x512 pixels. | | stage3-eyes-640 | 59 | [Download](dataset-stage3-eyes-640.zip) | 3-stage cropped (with eye-focus) dataset with the shorter side not exceeding 640 pixels. | | stage3-eyes-800 | 59 | [Download](dataset-stage3-eyes-800.zip) | 3-stage cropped (with eye-focus) dataset with the shorter side not exceeding 800 pixels. |
AppleHarem/pudding_arknights
[ "task_categories:text-to-image", "size_categories:n<1K", "license:mit", "art", "not-for-all-audiences", "region:us" ]
2023-12-17T09:49:53+00:00
{"license": "mit", "size_categories": ["n<1K"], "task_categories": ["text-to-image"], "tags": ["art", "not-for-all-audiences"]}
2023-12-17T09:49:56+00:00
[]
[]
TAGS #task_categories-text-to-image #size_categories-n<1K #license-mit #art #not-for-all-audiences #region-us
Dataset of pudding (Arknights) ============================== This is the dataset of pudding (Arknights), containing 20 images and their tags. Images are crawled from many sites (e.g. danbooru, pixiv, zerochan ...), and the auto-crawling system is powered by DeepGHS Team (huggingface organization). This is a WebUI that contains crawlers and other things: (LittleAppleWebUI)
[]
[ "TAGS\n#task_categories-text-to-image #size_categories-n<1K #license-mit #art #not-for-all-audiences #region-us \n" ]
[ 44 ]
[ "passage: TAGS\n#task_categories-text-to-image #size_categories-n<1K #license-mit #art #not-for-all-audiences #region-us \n" ]
476e30a2174d0424d759f9e8bb018f891e90217a
# Dataset of lee (Arknights) This is the dataset of lee (Arknights), containing 50 images and their tags. Images are crawled from many sites (e.g. danbooru, pixiv, zerochan ...), and the auto-crawling system is powered by [DeepGHS Team](https://github.com/deepghs) ([huggingface organization](https://huggingface.co/deepghs)). This is a WebUI that contains crawlers and other things: ([LittleAppleWebUI](https://github.com/LittleApple-fp16/LittleAppleWebUI)) | Name | Images | Download | Description | |:----------------|---------:|:----------------------------------------|:-----------------------------------------------------------------------------------------| | raw | 50 | [Download](dataset-raw.zip) | Raw data with meta information. | | raw-stage3 | 132 | [Download](dataset-raw-stage3.zip) | 3-stage cropped raw data with meta information. | | raw-stage3-eyes | 136 | [Download](dataset-raw-stage3-eyes.zip) | 3-stage cropped (with eye-focus) raw data with meta information. | | 384x512 | 50 | [Download](dataset-384x512.zip) | 384x512 aligned dataset. | | 512x704 | 50 | [Download](dataset-512x704.zip) | 512x704 aligned dataset. | | 640x880 | 50 | [Download](dataset-640x880.zip) | 640x880 aligned dataset. | | stage3-640 | 132 | [Download](dataset-stage3-640.zip) | 3-stage cropped dataset with the shorter side not exceeding 640 pixels. | | stage3-800 | 132 | [Download](dataset-stage3-800.zip) | 3-stage cropped dataset with the shorter side not exceeding 800 pixels. | | stage3-p512-640 | 116 | [Download](dataset-stage3-p512-640.zip) | 3-stage cropped dataset with the area not less than 512x512 pixels. | | stage3-eyes-640 | 136 | [Download](dataset-stage3-eyes-640.zip) | 3-stage cropped (with eye-focus) dataset with the shorter side not exceeding 640 pixels. | | stage3-eyes-800 | 136 | [Download](dataset-stage3-eyes-800.zip) | 3-stage cropped (with eye-focus) dataset with the shorter side not exceeding 800 pixels. |
AppleHarem/lee_arknights
[ "task_categories:text-to-image", "size_categories:n<1K", "license:mit", "art", "not-for-all-audiences", "region:us" ]
2023-12-17T11:42:33+00:00
{"license": "mit", "size_categories": ["n<1K"], "task_categories": ["text-to-image"], "tags": ["art", "not-for-all-audiences"]}
2023-12-17T11:42:42+00:00
[]
[]
TAGS #task_categories-text-to-image #size_categories-n<1K #license-mit #art #not-for-all-audiences #region-us
Dataset of lee (Arknights) ========================== This is the dataset of lee (Arknights), containing 50 images and their tags. Images are crawled from many sites (e.g. danbooru, pixiv, zerochan ...), and the auto-crawling system is powered by DeepGHS Team (huggingface organization). This is a WebUI that contains crawlers and other things: (LittleAppleWebUI)
[]
[ "TAGS\n#task_categories-text-to-image #size_categories-n<1K #license-mit #art #not-for-all-audiences #region-us \n" ]
[ 44 ]
[ "passage: TAGS\n#task_categories-text-to-image #size_categories-n<1K #license-mit #art #not-for-all-audiences #region-us \n" ]
d1821308921b2a0e3acf36fb348b71aaa54d1868
# Dataset Card for "te_wiki_sentences_1000000" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
deokhk/te_wiki_sentences_1000000
[ "region:us" ]
2023-12-17T12:06:46+00:00
{"dataset_info": {"features": [{"name": "sentence", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 189344044, "num_examples": 1000000}, {"name": "dev", "num_bytes": 166164, "num_examples": 1000}], "download_size": 43341997, "dataset_size": 189510208}}
2023-12-17T12:06:54+00:00
[]
[]
TAGS #region-us
# Dataset Card for "te_wiki_sentences_1000000" More Information needed
[ "# Dataset Card for \"te_wiki_sentences_1000000\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"te_wiki_sentences_1000000\"\n\nMore Information needed" ]
[ 6, 18 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"te_wiki_sentences_1000000\"\n\nMore Information needed" ]
5ee2171bc2bc0880d2f35c16063096ec1c4dc4da
# Dataset Card for "mmarco-passage-vi" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
tiennv/mmarco-passage-vi
[ "region:us" ]
2023-12-17T13:10:45+00:00
{"dataset_info": {"features": [{"name": "query_id", "dtype": "int64"}, {"name": "query", "dtype": "string"}, {"name": "positive_id", "dtype": "int64"}, {"name": "positive", "dtype": "string"}, {"name": "negatives", "sequence": "string"}], "splits": [{"name": "train", "num_bytes": 11894387626, "num_examples": 415936}], "download_size": 5402037391, "dataset_size": 11894387626}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]}
2023-12-17T13:21:58+00:00
[]
[]
TAGS #region-us
# Dataset Card for "mmarco-passage-vi" More Information needed
[ "# Dataset Card for \"mmarco-passage-vi\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"mmarco-passage-vi\"\n\nMore Information needed" ]
[ 6, 18 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"mmarco-passage-vi\"\n\nMore Information needed" ]
ddd42d3a5d3c408dcee7212391d404546f6eb845
# Dataset Card for "mmarco-passage-en" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
tiennv/mmarco-passage-en
[ "region:us" ]
2023-12-17T13:25:39+00:00
{"dataset_info": {"features": [{"name": "query_id", "dtype": "int64"}, {"name": "query", "dtype": "string"}, {"name": "positive_id", "dtype": "int64"}, {"name": "positive", "dtype": "string"}, {"name": "negatives", "sequence": "string"}], "splits": [{"name": "train", "num_bytes": 9290250492, "num_examples": 415936}], "download_size": 4889145151, "dataset_size": 9290250492}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]}
2023-12-17T13:35:37+00:00
[]
[]
TAGS #region-us
# Dataset Card for "mmarco-passage-en" More Information needed
[ "# Dataset Card for \"mmarco-passage-en\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"mmarco-passage-en\"\n\nMore Information needed" ]
[ 6, 18 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"mmarco-passage-en\"\n\nMore Information needed" ]