sha
stringlengths
40
40
text
stringlengths
1
13.4M
id
stringlengths
2
117
tags
listlengths
1
7.91k
created_at
stringlengths
25
25
metadata
stringlengths
2
875k
last_modified
stringlengths
25
25
arxiv
listlengths
0
25
languages
listlengths
0
7.91k
tags_str
stringlengths
17
159k
text_str
stringlengths
1
447k
text_lists
listlengths
0
352
processed_texts
listlengths
1
353
tokens_length
listlengths
1
353
input_texts
listlengths
1
40
f753ce8d5c3efd476062e98e151d6d790c57ca24
# Dataset Card for filtered-mc4 See original [mC4 dataset](https://huggingface.co/datasets/mc4) descriptions. You can apply any regular expression to the mC4 dataset like this: ```python from datasets import load_dataset dataset = load_dataset('hiroshi-matsuda-rit/filtered_mc4', 'ja', split='train', reject_patterns=[r"(セフレ|出会い?系|(?<!ユニ)セックス|ソープガイド)", r"[^\s]\ [^\s]+\ [^\s]"], max_reject_pattern_occurence=3, streaming=True) ``` ### Citation Information ``` @article{2019t5, author = {Colin Raffel and Noam Shazeer and Adam Roberts and Katherine Lee and Sharan Narang and Michael Matena and Yanqi Zhou and Wei Li and Peter J. Liu}, title = {Exploring the Limits of Transfer Learning with a Unified Text-to-Text Transformer}, journal = {arXiv e-prints}, year = {2019}, archivePrefix = {arXiv}, eprint = {1910.10683}, } ```
hiroshi-matsuda-rit/filtered_mc4
[ "multilinguality:multilingual", "license:odc-by", "arxiv:1910.10683", "region:us" ]
2023-08-28T06:58:22+00:00
{"license": ["odc-by"], "multilinguality": ["multilingual"], "pretty_name": "filtered-mc4"}
2023-08-28T07:52:06+00:00
[ "1910.10683" ]
[]
TAGS #multilinguality-multilingual #license-odc-by #arxiv-1910.10683 #region-us
# Dataset Card for filtered-mc4 See original mC4 dataset descriptions. You can apply any regular expression to the mC4 dataset like this:
[ "# Dataset Card for filtered-mc4\n\nSee original mC4 dataset descriptions.\n\nYou can apply any regular expression to the mC4 dataset like this:" ]
[ "TAGS\n#multilinguality-multilingual #license-odc-by #arxiv-1910.10683 #region-us \n", "# Dataset Card for filtered-mc4\n\nSee original mC4 dataset descriptions.\n\nYou can apply any regular expression to the mC4 dataset like this:" ]
[ 31, 37 ]
[ "passage: TAGS\n#multilinguality-multilingual #license-odc-by #arxiv-1910.10683 #region-us \n# Dataset Card for filtered-mc4\n\nSee original mC4 dataset descriptions.\n\nYou can apply any regular expression to the mC4 dataset like this:" ]
739e27ccb7e787c95fce3c5a7333999e3b46f962
# Dataset Card for "nl2sql_general_ability" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
ChengSong/nl2sql_general_ability
[ "region:us" ]
2023-08-28T07:02:36+00:00
{"dataset_info": {"features": [{"name": "prompt", "dtype": "string"}, {"name": "response", "dtype": "string"}, {"name": "ability", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 231190, "num_examples": 1108}, {"name": "test", "num_bytes": 35116, "num_examples": 263}], "download_size": 70063, "dataset_size": 266306}}
2023-08-28T07:02:48+00:00
[]
[]
TAGS #region-us
# Dataset Card for "nl2sql_general_ability" More Information needed
[ "# Dataset Card for \"nl2sql_general_ability\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"nl2sql_general_ability\"\n\nMore Information needed" ]
[ 6, 18 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"nl2sql_general_ability\"\n\nMore Information needed" ]
c0a0dd02b4c63f0acccb371c6e3fddd882485119
# Dataset Card for "nl2sql_general_ability_enhanced" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
ChengSong/nl2sql_general_ability_enhanced
[ "region:us" ]
2023-08-28T07:30:02+00:00
{"dataset_info": {"features": [{"name": "instruction", "dtype": "string"}, {"name": "schema", "dtype": "string"}, {"name": "knowledge", "dtype": "string"}, {"name": "constraint", "dtype": "string"}, {"name": "query", "dtype": "string"}, {"name": "response", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 177122354, "num_examples": 109911}], "download_size": 2742804, "dataset_size": 177122354}}
2023-08-28T07:30:20+00:00
[]
[]
TAGS #region-us
# Dataset Card for "nl2sql_general_ability_enhanced" More Information needed
[ "# Dataset Card for \"nl2sql_general_ability_enhanced\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"nl2sql_general_ability_enhanced\"\n\nMore Information needed" ]
[ 6, 22 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"nl2sql_general_ability_enhanced\"\n\nMore Information needed" ]
0a98dbf4ed26f0a55660fb54c6f1f3038af383a9
# Dataset Card for Evaluation run of psyche/kollama2-7b-v3 ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/psyche/kollama2-7b-v3 - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [psyche/kollama2-7b-v3](https://huggingface.co/psyche/kollama2-7b-v3) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 61 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). 
To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_psyche__kollama2-7b-v3", "harness_truthfulqa_mc_0", split="train") ``` ## Latest results These are the [latest results from run 2023-08-28T08:31:05.396495](https://huggingface.co/datasets/open-llm-leaderboard/details_psyche__kollama2-7b-v3/blob/main/results_2023-08-28T08%3A31%3A05.396495.json): ```python { "all": { "acc": 0.4080126650463043, "acc_stderr": 0.03490803118091981, "acc_norm": 0.41212249060720096, "acc_norm_stderr": 0.034895302556044526, "mc1": 0.2937576499388005, "mc1_stderr": 0.015945068581236618, "mc2": 0.42921423081004945, "mc2_stderr": 0.014206971382449723 }, "harness|arc:challenge|25": { "acc": 0.4539249146757679, "acc_stderr": 0.01454922110517187, "acc_norm": 0.4974402730375427, "acc_norm_stderr": 0.014611199329843784 }, "harness|hellaswag|10": { "acc": 0.5855407289384584, "acc_stderr": 0.004916216503770336, "acc_norm": 0.7845050786695877, "acc_norm_stderr": 0.004103249411456488 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.3925925925925926, "acc_stderr": 0.04218506215368879, "acc_norm": 0.3925925925925926, "acc_norm_stderr": 0.04218506215368879 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.35526315789473684, "acc_stderr": 0.038947344870133176, "acc_norm": 0.35526315789473684, "acc_norm_stderr": 0.038947344870133176 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.51, "acc_stderr": 0.05024183937956912, "acc_norm": 0.51, "acc_norm_stderr": 0.05024183937956912 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.37735849056603776, "acc_stderr": 0.029832808114796005, "acc_norm": 0.37735849056603776, "acc_norm_stderr": 0.029832808114796005 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.4097222222222222, 
"acc_stderr": 0.04112490974670787, "acc_norm": 0.4097222222222222, "acc_norm_stderr": 0.04112490974670787 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.21, "acc_stderr": 0.04093601807403326, "acc_norm": 0.21, "acc_norm_stderr": 0.04093601807403326 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.39, "acc_stderr": 0.04902071300001975, "acc_norm": 0.39, "acc_norm_stderr": 0.04902071300001975 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.29, "acc_stderr": 0.045604802157206845, "acc_norm": 0.29, "acc_norm_stderr": 0.045604802157206845 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.3236994219653179, "acc_stderr": 0.0356760379963917, "acc_norm": 0.3236994219653179, "acc_norm_stderr": 0.0356760379963917 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.19607843137254902, "acc_stderr": 0.03950581861179963, "acc_norm": 0.19607843137254902, "acc_norm_stderr": 0.03950581861179963 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.54, "acc_stderr": 0.05009082659620332, "acc_norm": 0.54, "acc_norm_stderr": 0.05009082659620332 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.4127659574468085, "acc_stderr": 0.03218471141400351, "acc_norm": 0.4127659574468085, "acc_norm_stderr": 0.03218471141400351 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.30701754385964913, "acc_stderr": 0.04339138322579861, "acc_norm": 0.30701754385964913, "acc_norm_stderr": 0.04339138322579861 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.38620689655172413, "acc_stderr": 0.04057324734419034, "acc_norm": 0.38620689655172413, "acc_norm_stderr": 0.04057324734419034 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.23544973544973544, "acc_stderr": 0.021851509822031722, "acc_norm": 0.23544973544973544, "acc_norm_stderr": 0.021851509822031722 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.3253968253968254, "acc_stderr": 0.04190596438871137, "acc_norm": 0.3253968253968254, 
"acc_norm_stderr": 0.04190596438871137 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.28, "acc_stderr": 0.04512608598542128, "acc_norm": 0.28, "acc_norm_stderr": 0.04512608598542128 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.3935483870967742, "acc_stderr": 0.027791878753132274, "acc_norm": 0.3935483870967742, "acc_norm_stderr": 0.027791878753132274 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.3054187192118227, "acc_stderr": 0.03240661565868408, "acc_norm": 0.3054187192118227, "acc_norm_stderr": 0.03240661565868408 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.33, "acc_stderr": 0.047258156262526045, "acc_norm": 0.33, "acc_norm_stderr": 0.047258156262526045 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.4909090909090909, "acc_stderr": 0.0390369864774844, "acc_norm": 0.4909090909090909, "acc_norm_stderr": 0.0390369864774844 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.41919191919191917, "acc_stderr": 0.035155207286704175, "acc_norm": 0.41919191919191917, "acc_norm_stderr": 0.035155207286704175 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.5958549222797928, "acc_stderr": 0.0354150857888402, "acc_norm": 0.5958549222797928, "acc_norm_stderr": 0.0354150857888402 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.3487179487179487, "acc_stderr": 0.02416278028401772, "acc_norm": 0.3487179487179487, "acc_norm_stderr": 0.02416278028401772 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.24444444444444444, "acc_stderr": 0.02620276653465215, "acc_norm": 0.24444444444444444, "acc_norm_stderr": 0.02620276653465215 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.3445378151260504, "acc_stderr": 0.030868682604121626, "acc_norm": 0.3445378151260504, "acc_norm_stderr": 0.030868682604121626 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.23841059602649006, "acc_stderr": 
0.0347918557259966, "acc_norm": 0.23841059602649006, "acc_norm_stderr": 0.0347918557259966 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.48807339449541287, "acc_stderr": 0.021431223617362223, "acc_norm": 0.48807339449541287, "acc_norm_stderr": 0.021431223617362223 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.23148148148148148, "acc_stderr": 0.028765111718046965, "acc_norm": 0.23148148148148148, "acc_norm_stderr": 0.028765111718046965 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.44607843137254904, "acc_stderr": 0.03488845451304974, "acc_norm": 0.44607843137254904, "acc_norm_stderr": 0.03488845451304974 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.4430379746835443, "acc_stderr": 0.03233532777533484, "acc_norm": 0.4430379746835443, "acc_norm_stderr": 0.03233532777533484 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.515695067264574, "acc_stderr": 0.0335412657542081, "acc_norm": 0.515695067264574, "acc_norm_stderr": 0.0335412657542081 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.4732824427480916, "acc_stderr": 0.04379024936553894, "acc_norm": 0.4732824427480916, "acc_norm_stderr": 0.04379024936553894 }, "harness|hendrycksTest-international_law|5": { "acc": 0.5785123966942148, "acc_stderr": 0.04507732278775087, "acc_norm": 0.5785123966942148, "acc_norm_stderr": 0.04507732278775087 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.42592592592592593, "acc_stderr": 0.0478034362693679, "acc_norm": 0.42592592592592593, "acc_norm_stderr": 0.0478034362693679 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.4294478527607362, "acc_stderr": 0.03889066619112722, "acc_norm": 0.4294478527607362, "acc_norm_stderr": 0.03889066619112722 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.4017857142857143, "acc_stderr": 0.04653333146973647, "acc_norm": 0.4017857142857143, "acc_norm_stderr": 0.04653333146973647 }, "harness|hendrycksTest-management|5": { "acc": 
0.47572815533980584, "acc_stderr": 0.049449010929737795, "acc_norm": 0.47572815533980584, "acc_norm_stderr": 0.049449010929737795 }, "harness|hendrycksTest-marketing|5": { "acc": 0.6025641025641025, "acc_stderr": 0.032059534537892925, "acc_norm": 0.6025641025641025, "acc_norm_stderr": 0.032059534537892925 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.5, "acc_stderr": 0.050251890762960605, "acc_norm": 0.5, "acc_norm_stderr": 0.050251890762960605 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.5938697318007663, "acc_stderr": 0.017562037406478923, "acc_norm": 0.5938697318007663, "acc_norm_stderr": 0.017562037406478923 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.45664739884393063, "acc_stderr": 0.02681771813034892, "acc_norm": 0.45664739884393063, "acc_norm_stderr": 0.02681771813034892 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.23798882681564246, "acc_stderr": 0.014242630070574915, "acc_norm": 0.23798882681564246, "acc_norm_stderr": 0.014242630070574915 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.40522875816993464, "acc_stderr": 0.02811092849280908, "acc_norm": 0.40522875816993464, "acc_norm_stderr": 0.02811092849280908 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.5273311897106109, "acc_stderr": 0.02835563356832818, "acc_norm": 0.5273311897106109, "acc_norm_stderr": 0.02835563356832818 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.4567901234567901, "acc_stderr": 0.027716661650194045, "acc_norm": 0.4567901234567901, "acc_norm_stderr": 0.027716661650194045 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.34397163120567376, "acc_stderr": 0.028338017428611327, "acc_norm": 0.34397163120567376, "acc_norm_stderr": 0.028338017428611327 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.33572359843546284, "acc_stderr": 0.012061304157664604, "acc_norm": 0.33572359843546284, "acc_norm_stderr": 0.012061304157664604 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 
0.36764705882352944, "acc_stderr": 0.029289413409403196, "acc_norm": 0.36764705882352944, "acc_norm_stderr": 0.029289413409403196 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.4150326797385621, "acc_stderr": 0.01993362777685741, "acc_norm": 0.4150326797385621, "acc_norm_stderr": 0.01993362777685741 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.5181818181818182, "acc_stderr": 0.04785964010794917, "acc_norm": 0.5181818181818182, "acc_norm_stderr": 0.04785964010794917 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.27755102040816326, "acc_stderr": 0.02866685779027465, "acc_norm": 0.27755102040816326, "acc_norm_stderr": 0.02866685779027465 }, "harness|hendrycksTest-sociology|5": { "acc": 0.5074626865671642, "acc_stderr": 0.035351400842767194, "acc_norm": 0.5074626865671642, "acc_norm_stderr": 0.035351400842767194 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.64, "acc_stderr": 0.04824181513244218, "acc_norm": 0.64, "acc_norm_stderr": 0.04824181513244218 }, "harness|hendrycksTest-virology|5": { "acc": 0.3674698795180723, "acc_stderr": 0.03753267402120575, "acc_norm": 0.3674698795180723, "acc_norm_stderr": 0.03753267402120575 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.6081871345029239, "acc_stderr": 0.037439798259263996, "acc_norm": 0.6081871345029239, "acc_norm_stderr": 0.037439798259263996 }, "harness|truthfulqa:mc|0": { "mc1": 0.2937576499388005, "mc1_stderr": 0.015945068581236618, "mc2": 0.42921423081004945, "mc2_stderr": 0.014206971382449723 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? 
[More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
open-llm-leaderboard/details_psyche__kollama2-7b-v3
[ "region:us" ]
2023-08-28T07:31:23+00:00
{"pretty_name": "Evaluation run of psyche/kollama2-7b-v3", "dataset_summary": "Dataset automatically created during the evaluation run of model [psyche/kollama2-7b-v3](https://huggingface.co/psyche/kollama2-7b-v3) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 61 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_psyche__kollama2-7b-v3\",\n\t\"harness_truthfulqa_mc_0\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-08-28T08:31:05.396495](https://huggingface.co/datasets/open-llm-leaderboard/details_psyche__kollama2-7b-v3/blob/main/results_2023-08-28T08%3A31%3A05.396495.json):\n\n```python\n{\n \"all\": {\n \"acc\": 0.4080126650463043,\n \"acc_stderr\": 0.03490803118091981,\n \"acc_norm\": 0.41212249060720096,\n \"acc_norm_stderr\": 0.034895302556044526,\n \"mc1\": 0.2937576499388005,\n \"mc1_stderr\": 0.015945068581236618,\n \"mc2\": 0.42921423081004945,\n \"mc2_stderr\": 0.014206971382449723\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.4539249146757679,\n \"acc_stderr\": 0.01454922110517187,\n \"acc_norm\": 0.4974402730375427,\n \"acc_norm_stderr\": 0.014611199329843784\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.5855407289384584,\n \"acc_stderr\": 0.004916216503770336,\n 
\"acc_norm\": 0.7845050786695877,\n \"acc_norm_stderr\": 0.004103249411456488\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.31,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.31,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.3925925925925926,\n \"acc_stderr\": 0.04218506215368879,\n \"acc_norm\": 0.3925925925925926,\n \"acc_norm_stderr\": 0.04218506215368879\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.35526315789473684,\n \"acc_stderr\": 0.038947344870133176,\n \"acc_norm\": 0.35526315789473684,\n \"acc_norm_stderr\": 0.038947344870133176\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.51,\n \"acc_stderr\": 0.05024183937956912,\n \"acc_norm\": 0.51,\n \"acc_norm_stderr\": 0.05024183937956912\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.37735849056603776,\n \"acc_stderr\": 0.029832808114796005,\n \"acc_norm\": 0.37735849056603776,\n \"acc_norm_stderr\": 0.029832808114796005\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.4097222222222222,\n \"acc_stderr\": 0.04112490974670787,\n \"acc_norm\": 0.4097222222222222,\n \"acc_norm_stderr\": 0.04112490974670787\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.21,\n \"acc_stderr\": 0.04093601807403326,\n \"acc_norm\": 0.21,\n \"acc_norm_stderr\": 0.04093601807403326\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.39,\n \"acc_stderr\": 0.04902071300001975,\n \"acc_norm\": 0.39,\n \"acc_norm_stderr\": 0.04902071300001975\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.29,\n \"acc_stderr\": 0.045604802157206845,\n \"acc_norm\": 0.29,\n \"acc_norm_stderr\": 0.045604802157206845\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.3236994219653179,\n \"acc_stderr\": 0.0356760379963917,\n \"acc_norm\": 0.3236994219653179,\n \"acc_norm_stderr\": 0.0356760379963917\n },\n 
\"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.19607843137254902,\n \"acc_stderr\": 0.03950581861179963,\n \"acc_norm\": 0.19607843137254902,\n \"acc_norm_stderr\": 0.03950581861179963\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.54,\n \"acc_stderr\": 0.05009082659620332,\n \"acc_norm\": 0.54,\n \"acc_norm_stderr\": 0.05009082659620332\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.4127659574468085,\n \"acc_stderr\": 0.03218471141400351,\n \"acc_norm\": 0.4127659574468085,\n \"acc_norm_stderr\": 0.03218471141400351\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.30701754385964913,\n \"acc_stderr\": 0.04339138322579861,\n \"acc_norm\": 0.30701754385964913,\n \"acc_norm_stderr\": 0.04339138322579861\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.38620689655172413,\n \"acc_stderr\": 0.04057324734419034,\n \"acc_norm\": 0.38620689655172413,\n \"acc_norm_stderr\": 0.04057324734419034\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.23544973544973544,\n \"acc_stderr\": 0.021851509822031722,\n \"acc_norm\": 0.23544973544973544,\n \"acc_norm_stderr\": 0.021851509822031722\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.3253968253968254,\n \"acc_stderr\": 0.04190596438871137,\n \"acc_norm\": 0.3253968253968254,\n \"acc_norm_stderr\": 0.04190596438871137\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.28,\n \"acc_stderr\": 0.04512608598542128,\n \"acc_norm\": 0.28,\n \"acc_norm_stderr\": 0.04512608598542128\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.3935483870967742,\n \"acc_stderr\": 0.027791878753132274,\n \"acc_norm\": 0.3935483870967742,\n \"acc_norm_stderr\": 0.027791878753132274\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.3054187192118227,\n \"acc_stderr\": 0.03240661565868408,\n \"acc_norm\": 0.3054187192118227,\n \"acc_norm_stderr\": 
0.03240661565868408\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.33,\n \"acc_stderr\": 0.047258156262526045,\n \"acc_norm\": 0.33,\n \"acc_norm_stderr\": 0.047258156262526045\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.4909090909090909,\n \"acc_stderr\": 0.0390369864774844,\n \"acc_norm\": 0.4909090909090909,\n \"acc_norm_stderr\": 0.0390369864774844\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.41919191919191917,\n \"acc_stderr\": 0.035155207286704175,\n \"acc_norm\": 0.41919191919191917,\n \"acc_norm_stderr\": 0.035155207286704175\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.5958549222797928,\n \"acc_stderr\": 0.0354150857888402,\n \"acc_norm\": 0.5958549222797928,\n \"acc_norm_stderr\": 0.0354150857888402\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.3487179487179487,\n \"acc_stderr\": 0.02416278028401772,\n \"acc_norm\": 0.3487179487179487,\n \"acc_norm_stderr\": 0.02416278028401772\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.24444444444444444,\n \"acc_stderr\": 0.02620276653465215,\n \"acc_norm\": 0.24444444444444444,\n \"acc_norm_stderr\": 0.02620276653465215\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.3445378151260504,\n \"acc_stderr\": 0.030868682604121626,\n \"acc_norm\": 0.3445378151260504,\n \"acc_norm_stderr\": 0.030868682604121626\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.23841059602649006,\n \"acc_stderr\": 0.0347918557259966,\n \"acc_norm\": 0.23841059602649006,\n \"acc_norm_stderr\": 0.0347918557259966\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.48807339449541287,\n \"acc_stderr\": 0.021431223617362223,\n \"acc_norm\": 0.48807339449541287,\n \"acc_norm_stderr\": 0.021431223617362223\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 
0.23148148148148148,\n \"acc_stderr\": 0.028765111718046965,\n \"acc_norm\": 0.23148148148148148,\n \"acc_norm_stderr\": 0.028765111718046965\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.44607843137254904,\n \"acc_stderr\": 0.03488845451304974,\n \"acc_norm\": 0.44607843137254904,\n \"acc_norm_stderr\": 0.03488845451304974\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.4430379746835443,\n \"acc_stderr\": 0.03233532777533484,\n \"acc_norm\": 0.4430379746835443,\n \"acc_norm_stderr\": 0.03233532777533484\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.515695067264574,\n \"acc_stderr\": 0.0335412657542081,\n \"acc_norm\": 0.515695067264574,\n \"acc_norm_stderr\": 0.0335412657542081\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.4732824427480916,\n \"acc_stderr\": 0.04379024936553894,\n \"acc_norm\": 0.4732824427480916,\n \"acc_norm_stderr\": 0.04379024936553894\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.5785123966942148,\n \"acc_stderr\": 0.04507732278775087,\n \"acc_norm\": 0.5785123966942148,\n \"acc_norm_stderr\": 0.04507732278775087\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.42592592592592593,\n \"acc_stderr\": 0.0478034362693679,\n \"acc_norm\": 0.42592592592592593,\n \"acc_norm_stderr\": 0.0478034362693679\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.4294478527607362,\n \"acc_stderr\": 0.03889066619112722,\n \"acc_norm\": 0.4294478527607362,\n \"acc_norm_stderr\": 0.03889066619112722\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.4017857142857143,\n \"acc_stderr\": 0.04653333146973647,\n \"acc_norm\": 0.4017857142857143,\n \"acc_norm_stderr\": 0.04653333146973647\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.47572815533980584,\n \"acc_stderr\": 0.049449010929737795,\n \"acc_norm\": 0.47572815533980584,\n \"acc_norm_stderr\": 0.049449010929737795\n },\n 
\"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.6025641025641025,\n \"acc_stderr\": 0.032059534537892925,\n \"acc_norm\": 0.6025641025641025,\n \"acc_norm_stderr\": 0.032059534537892925\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.5,\n \"acc_stderr\": 0.050251890762960605,\n \"acc_norm\": 0.5,\n \"acc_norm_stderr\": 0.050251890762960605\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.5938697318007663,\n \"acc_stderr\": 0.017562037406478923,\n \"acc_norm\": 0.5938697318007663,\n \"acc_norm_stderr\": 0.017562037406478923\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.45664739884393063,\n \"acc_stderr\": 0.02681771813034892,\n \"acc_norm\": 0.45664739884393063,\n \"acc_norm_stderr\": 0.02681771813034892\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.23798882681564246,\n \"acc_stderr\": 0.014242630070574915,\n \"acc_norm\": 0.23798882681564246,\n \"acc_norm_stderr\": 0.014242630070574915\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.40522875816993464,\n \"acc_stderr\": 0.02811092849280908,\n \"acc_norm\": 0.40522875816993464,\n \"acc_norm_stderr\": 0.02811092849280908\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.5273311897106109,\n \"acc_stderr\": 0.02835563356832818,\n \"acc_norm\": 0.5273311897106109,\n \"acc_norm_stderr\": 0.02835563356832818\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.4567901234567901,\n \"acc_stderr\": 0.027716661650194045,\n \"acc_norm\": 0.4567901234567901,\n \"acc_norm_stderr\": 0.027716661650194045\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.34397163120567376,\n \"acc_stderr\": 0.028338017428611327,\n \"acc_norm\": 0.34397163120567376,\n \"acc_norm_stderr\": 0.028338017428611327\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.33572359843546284,\n \"acc_stderr\": 0.012061304157664604,\n \"acc_norm\": 0.33572359843546284,\n \"acc_norm_stderr\": 
0.012061304157664604\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.36764705882352944,\n \"acc_stderr\": 0.029289413409403196,\n \"acc_norm\": 0.36764705882352944,\n \"acc_norm_stderr\": 0.029289413409403196\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.4150326797385621,\n \"acc_stderr\": 0.01993362777685741,\n \"acc_norm\": 0.4150326797385621,\n \"acc_norm_stderr\": 0.01993362777685741\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.5181818181818182,\n \"acc_stderr\": 0.04785964010794917,\n \"acc_norm\": 0.5181818181818182,\n \"acc_norm_stderr\": 0.04785964010794917\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.27755102040816326,\n \"acc_stderr\": 0.02866685779027465,\n \"acc_norm\": 0.27755102040816326,\n \"acc_norm_stderr\": 0.02866685779027465\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.5074626865671642,\n \"acc_stderr\": 0.035351400842767194,\n \"acc_norm\": 0.5074626865671642,\n \"acc_norm_stderr\": 0.035351400842767194\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.64,\n \"acc_stderr\": 0.04824181513244218,\n \"acc_norm\": 0.64,\n \"acc_norm_stderr\": 0.04824181513244218\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.3674698795180723,\n \"acc_stderr\": 0.03753267402120575,\n \"acc_norm\": 0.3674698795180723,\n \"acc_norm_stderr\": 0.03753267402120575\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.6081871345029239,\n \"acc_stderr\": 0.037439798259263996,\n \"acc_norm\": 0.6081871345029239,\n \"acc_norm_stderr\": 0.037439798259263996\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.2937576499388005,\n \"mc1_stderr\": 0.015945068581236618,\n \"mc2\": 0.42921423081004945,\n \"mc2_stderr\": 0.014206971382449723\n }\n}\n```", "repo_url": "https://huggingface.co/psyche/kollama2-7b-v3", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": 
"[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_08_28T08_31_05.396495", "path": ["**/details_harness|arc:challenge|25_2023-08-28T08:31:05.396495.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-08-28T08:31:05.396495.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_08_28T08_31_05.396495", "path": ["**/details_harness|hellaswag|10_2023-08-28T08:31:05.396495.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-08-28T08:31:05.396495.parquet"]}]}, {"config_name": "harness_hendrycksTest", "data_files": [{"split": "2023_08_28T08_31_05.396495", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-28T08:31:05.396495.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-28T08:31:05.396495.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-28T08:31:05.396495.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-28T08:31:05.396495.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-28T08:31:05.396495.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-28T08:31:05.396495.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-28T08:31:05.396495.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-28T08:31:05.396495.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-28T08:31:05.396495.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-28T08:31:05.396495.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-28T08:31:05.396495.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-28T08:31:05.396495.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-28T08:31:05.396495.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-28T08:31:05.396495.parquet", 
"**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-28T08:31:05.396495.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-28T08:31:05.396495.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-28T08:31:05.396495.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-28T08:31:05.396495.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-08-28T08:31:05.396495.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-28T08:31:05.396495.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-28T08:31:05.396495.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-28T08:31:05.396495.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-28T08:31:05.396495.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-28T08:31:05.396495.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-28T08:31:05.396495.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-28T08:31:05.396495.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-28T08:31:05.396495.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-28T08:31:05.396495.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-28T08:31:05.396495.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-28T08:31:05.396495.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-28T08:31:05.396495.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-28T08:31:05.396495.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-28T08:31:05.396495.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-28T08:31:05.396495.parquet", 
"**/details_harness|hendrycksTest-international_law|5_2023-08-28T08:31:05.396495.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-28T08:31:05.396495.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-28T08:31:05.396495.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-08-28T08:31:05.396495.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-28T08:31:05.396495.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-28T08:31:05.396495.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-28T08:31:05.396495.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-28T08:31:05.396495.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-28T08:31:05.396495.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-28T08:31:05.396495.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-28T08:31:05.396495.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-28T08:31:05.396495.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-28T08:31:05.396495.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-28T08:31:05.396495.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-28T08:31:05.396495.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-28T08:31:05.396495.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-28T08:31:05.396495.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-28T08:31:05.396495.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-28T08:31:05.396495.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-28T08:31:05.396495.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-28T08:31:05.396495.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-28T08:31:05.396495.parquet", 
"**/details_harness|hendrycksTest-world_religions|5_2023-08-28T08:31:05.396495.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-28T08:31:05.396495.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-28T08:31:05.396495.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-28T08:31:05.396495.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-28T08:31:05.396495.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-28T08:31:05.396495.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-28T08:31:05.396495.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-28T08:31:05.396495.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-28T08:31:05.396495.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-28T08:31:05.396495.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-28T08:31:05.396495.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-28T08:31:05.396495.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-28T08:31:05.396495.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-28T08:31:05.396495.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-28T08:31:05.396495.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-28T08:31:05.396495.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-28T08:31:05.396495.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-28T08:31:05.396495.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-28T08:31:05.396495.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-08-28T08:31:05.396495.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-28T08:31:05.396495.parquet", 
"**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-28T08:31:05.396495.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-28T08:31:05.396495.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-28T08:31:05.396495.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-28T08:31:05.396495.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-28T08:31:05.396495.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-28T08:31:05.396495.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-28T08:31:05.396495.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-28T08:31:05.396495.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-28T08:31:05.396495.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-28T08:31:05.396495.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-28T08:31:05.396495.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-28T08:31:05.396495.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-28T08:31:05.396495.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-28T08:31:05.396495.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-28T08:31:05.396495.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-28T08:31:05.396495.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-28T08:31:05.396495.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-08-28T08:31:05.396495.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-28T08:31:05.396495.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-28T08:31:05.396495.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-28T08:31:05.396495.parquet", 
"**/details_harness|hendrycksTest-miscellaneous|5_2023-08-28T08:31:05.396495.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-28T08:31:05.396495.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-28T08:31:05.396495.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-28T08:31:05.396495.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-28T08:31:05.396495.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-28T08:31:05.396495.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-28T08:31:05.396495.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-28T08:31:05.396495.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-28T08:31:05.396495.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-28T08:31:05.396495.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-28T08:31:05.396495.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-28T08:31:05.396495.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-28T08:31:05.396495.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-28T08:31:05.396495.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-28T08:31:05.396495.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-08-28T08:31:05.396495.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_08_28T08_31_05.396495", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-28T08:31:05.396495.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-28T08:31:05.396495.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_08_28T08_31_05.396495", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-28T08:31:05.396495.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-anatomy|5_2023-08-28T08:31:05.396495.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_08_28T08_31_05.396495", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-28T08:31:05.396495.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-28T08:31:05.396495.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_08_28T08_31_05.396495", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-08-28T08:31:05.396495.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-08-28T08:31:05.396495.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_08_28T08_31_05.396495", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-28T08:31:05.396495.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-28T08:31:05.396495.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_08_28T08_31_05.396495", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-28T08:31:05.396495.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-28T08:31:05.396495.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_08_28T08_31_05.396495", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-28T08:31:05.396495.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-28T08:31:05.396495.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_08_28T08_31_05.396495", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-28T08:31:05.396495.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-28T08:31:05.396495.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_08_28T08_31_05.396495", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-28T08:31:05.396495.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-28T08:31:05.396495.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_08_28T08_31_05.396495", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-08-28T08:31:05.396495.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-08-28T08:31:05.396495.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_08_28T08_31_05.396495", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-08-28T08:31:05.396495.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-08-28T08:31:05.396495.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_08_28T08_31_05.396495", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-28T08:31:05.396495.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-28T08:31:05.396495.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_08_28T08_31_05.396495", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-28T08:31:05.396495.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-28T08:31:05.396495.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_08_28T08_31_05.396495", "path": 
["**/details_harness|hendrycksTest-econometrics|5_2023-08-28T08:31:05.396495.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-28T08:31:05.396495.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_08_28T08_31_05.396495", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-28T08:31:05.396495.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-28T08:31:05.396495.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_08_28T08_31_05.396495", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-28T08:31:05.396495.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-28T08:31:05.396495.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_08_28T08_31_05.396495", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-28T08:31:05.396495.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-28T08:31:05.396495.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_08_28T08_31_05.396495", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-28T08:31:05.396495.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-28T08:31:05.396495.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_08_28T08_31_05.396495", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-28T08:31:05.396495.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-28T08:31:05.396495.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", 
"data_files": [{"split": "2023_08_28T08_31_05.396495", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-28T08:31:05.396495.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-28T08:31:05.396495.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_08_28T08_31_05.396495", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-28T08:31:05.396495.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-28T08:31:05.396495.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_08_28T08_31_05.396495", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-28T08:31:05.396495.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-28T08:31:05.396495.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_08_28T08_31_05.396495", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-28T08:31:05.396495.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-28T08:31:05.396495.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_08_28T08_31_05.396495", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-28T08:31:05.396495.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-28T08:31:05.396495.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_08_28T08_31_05.396495", "path": 
["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-28T08:31:05.396495.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-28T08:31:05.396495.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_08_28T08_31_05.396495", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-28T08:31:05.396495.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-28T08:31:05.396495.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_08_28T08_31_05.396495", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-28T08:31:05.396495.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-28T08:31:05.396495.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_08_28T08_31_05.396495", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-28T08:31:05.396495.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-28T08:31:05.396495.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_08_28T08_31_05.396495", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-28T08:31:05.396495.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-28T08:31:05.396495.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_08_28T08_31_05.396495", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-28T08:31:05.396495.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-28T08:31:05.396495.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_08_28T08_31_05.396495", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-28T08:31:05.396495.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-28T08:31:05.396495.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_08_28T08_31_05.396495", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-28T08:31:05.396495.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-28T08:31:05.396495.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_08_28T08_31_05.396495", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-28T08:31:05.396495.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-28T08:31:05.396495.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_08_28T08_31_05.396495", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-28T08:31:05.396495.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-28T08:31:05.396495.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_08_28T08_31_05.396495", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-28T08:31:05.396495.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-28T08:31:05.396495.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_08_28T08_31_05.396495", "path": 
["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-28T08:31:05.396495.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-28T08:31:05.396495.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_08_28T08_31_05.396495", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-28T08:31:05.396495.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-28T08:31:05.396495.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_08_28T08_31_05.396495", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-28T08:31:05.396495.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-28T08:31:05.396495.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_08_28T08_31_05.396495", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-28T08:31:05.396495.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-28T08:31:05.396495.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_08_28T08_31_05.396495", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-28T08:31:05.396495.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-28T08:31:05.396495.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_08_28T08_31_05.396495", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-28T08:31:05.396495.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-28T08:31:05.396495.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_08_28T08_31_05.396495", "path": 
["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-28T08:31:05.396495.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-28T08:31:05.396495.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_08_28T08_31_05.396495", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-28T08:31:05.396495.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-28T08:31:05.396495.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_08_28T08_31_05.396495", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-28T08:31:05.396495.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-28T08:31:05.396495.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_08_28T08_31_05.396495", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-28T08:31:05.396495.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-28T08:31:05.396495.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_08_28T08_31_05.396495", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-28T08:31:05.396495.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-28T08:31:05.396495.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_08_28T08_31_05.396495", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-28T08:31:05.396495.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-28T08:31:05.396495.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_08_28T08_31_05.396495", "path": 
["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-28T08:31:05.396495.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-28T08:31:05.396495.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_08_28T08_31_05.396495", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-28T08:31:05.396495.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-28T08:31:05.396495.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_08_28T08_31_05.396495", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-28T08:31:05.396495.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-28T08:31:05.396495.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_08_28T08_31_05.396495", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-28T08:31:05.396495.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-28T08:31:05.396495.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_08_28T08_31_05.396495", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-28T08:31:05.396495.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-28T08:31:05.396495.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_08_28T08_31_05.396495", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-28T08:31:05.396495.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-28T08:31:05.396495.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_08_28T08_31_05.396495", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-28T08:31:05.396495.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-28T08:31:05.396495.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_08_28T08_31_05.396495", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-28T08:31:05.396495.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-28T08:31:05.396495.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_08_28T08_31_05.396495", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-28T08:31:05.396495.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-28T08:31:05.396495.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_08_28T08_31_05.396495", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-28T08:31:05.396495.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-28T08:31:05.396495.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_08_28T08_31_05.396495", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-28T08:31:05.396495.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-28T08:31:05.396495.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_08_28T08_31_05.396495", "path": ["results_2023-08-28T08:31:05.396495.parquet"]}, {"split": "latest", "path": ["results_2023-08-28T08:31:05.396495.parquet"]}]}]}
2023-08-28T07:32:21+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of psyche/kollama2-7b-v3 ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model psyche/kollama2-7b-v3 on the Open LLM Leaderboard. The dataset is composed of 61 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-08-28T08:31:05.396495: ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
[ "# Dataset Card for Evaluation run of psyche/kollama2-7b-v3", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model psyche/kollama2-7b-v3 on the Open LLM Leaderboard.\n\nThe dataset is composed of 61 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-08-28T08:31:05.396495:", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of psyche/kollama2-7b-v3", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model psyche/kollama2-7b-v3 on the Open LLM Leaderboard.\n\nThe dataset is composed of 61 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-08-28T08:31:05.396495:", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 20, 31, 168, 22, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of psyche/kollama2-7b-v3## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model psyche/kollama2-7b-v3 on the Open LLM Leaderboard.\n\nThe dataset is composed of 61 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-08-28T08:31:05.396495:### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
b77f1f1d2b5887e2fdb916bb9e7785c70e9fdfb9
Original data is [Antrophic/hh-rlhf](https://huggingface.co/datasets/Anthropic/hh-rlhf). It was converted to Vicuna format sample ``` {'chosen': {'from': 'gpt', 'value': 'A horseshoe is usually made out of metal and is about 3 to 3.5 inches long and around 1 inch thick. The horseshoe should also have a 2 inch by 3 inch flat at the bottom where the rubber meets the metal. We also need two stakes and six horseshoes.'}, 'rejected': {'from': 'gpt', 'value': 'Horseshoes are either metal or plastic discs. The horseshoes come in different weights, and the lighter ones are easier to throw, so they are often the standard for beginning players.'}, 'source': 'helpful-base', 'context': [{'from': 'human', 'value': 'Hi, I want to learn to play horseshoes. Can you teach me?'}, {'from': 'gpt', 'value': 'I can, but maybe I should begin by telling you that a typical game consists of 2 players and 6 or 8 horseshoes.'}], 'instruction': {'from': 'human', 'value': 'Okay. What else is needed to play, and what are the rules?'}} ``` source columns has 4 values ``` {'harmless-base', 'helpful-base', 'helpful-online', 'helpful-rejection-sampled'} ``` In context, chosen, rejected column, 'from' key is either 'human' or 'gpt'.
heegyu/hh-rlhf-vicuna-format
[ "region:us" ]
2023-08-28T07:37:18+00:00
{"configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "test", "path": "data/test-*"}]}], "dataset_info": {"features": [{"name": "chosen", "struct": [{"name": "from", "dtype": "string"}, {"name": "value", "dtype": "string"}]}, {"name": "rejected", "struct": [{"name": "from", "dtype": "string"}, {"name": "value", "dtype": "string"}]}, {"name": "source", "dtype": "string"}, {"name": "context", "list": [{"name": "from", "dtype": "string"}, {"name": "value", "dtype": "string"}]}, {"name": "instruction", "struct": [{"name": "from", "dtype": "string"}, {"name": "value", "dtype": "string"}]}], "splits": [{"name": "train", "num_bytes": 210193081, "num_examples": 160800}, {"name": "test", "num_bytes": 11248194, "num_examples": 8552}], "download_size": 128509060, "dataset_size": 221441275}}
2023-09-06T02:07:11+00:00
[]
[]
TAGS #region-us
Original data is Antrophic/hh-rlhf. It was converted to Vicuna format sample source columns has 4 values In context, chosen, rejected column, 'from' key is either 'human' or 'gpt'.
[]
[ "TAGS\n#region-us \n" ]
[ 6 ]
[ "passage: TAGS\n#region-us \n" ]
bfbc009279bda282c6123f4a0d594693a58ef90d
Updated on https://huggingface.co/datasets/voidful/2WikiMultihopQA/blob/main/dev.json with modifications.
thinkall/2WikiMultihopQA
[ "license:apache-2.0", "region:us" ]
2023-08-28T07:43:17+00:00
{"license": "apache-2.0"}
2023-08-28T23:30:26+00:00
[]
[]
TAGS #license-apache-2.0 #region-us
Updated on URL with modifications.
[]
[ "TAGS\n#license-apache-2.0 #region-us \n" ]
[ 14 ]
[ "passage: TAGS\n#license-apache-2.0 #region-us \n" ]
add28cc3f016049b179273de71b6e1c35d602ff8
# Sentiment-DEMO-abc123 Created from AIOD platform
mtkinit/Sentiment-DEMO-abc123
[ "region:us" ]
2023-08-28T07:51:02+00:00
{"pretty_name": "Sentiment-DEMO-abc123"}
2023-08-28T07:51:03+00:00
[]
[]
TAGS #region-us
# Sentiment-DEMO-abc123 Created from AIOD platform
[ "# Sentiment-DEMO-abc123\nCreated from AIOD platform" ]
[ "TAGS\n#region-us \n", "# Sentiment-DEMO-abc123\nCreated from AIOD platform" ]
[ 6, 16 ]
[ "passage: TAGS\n#region-us \n# Sentiment-DEMO-abc123\nCreated from AIOD platform" ]
240fc800c4695d86ce0f32a713e199cd2ba66ab6
# ELYZA-tasks-100: 日本語instructionモデル評価データセット ![Imgur](images/key_visual.png) ## Data Description 本データセットはinstruction-tuningを行ったモデルの評価用データセットです。詳細は [リリースのnote記事](https://note.com/elyza/n/na405acaca130) を参照してください。 特徴: - 複雑な指示・タスクを含む100件の日本語データです。 - 役に立つAIアシスタントとして、丁寧な出力が求められます。 - 全てのデータに対して評価観点がアノテーションされており、評価の揺らぎを抑えることが期待されます。 具体的には以下のようなタスクを含みます。 - 要約を修正し、修正箇所を説明するタスク - 具体的なエピソードから抽象的な教訓を述べるタスク - ユーザーの意図を汲み役に立つAIアシスタントとして振る舞うタスク - 場合分けを必要とする複雑な算数のタスク - 未知の言語からパターンを抽出し日本語訳する高度な推論を必要とするタスク - 複数の指示を踏まえた上でyoutubeの対話を生成するタスク - 架空の生き物や熟語に関する生成・大喜利などの想像力が求められるタスク ## Usage datasetsライブラリから利用が可能です。 ```py >>> from datasets import load_dataset >>> ds = load_dataset("elyza/ELYZA-tasks-100") >>> ds DatasetDict({ test: Dataset({ features: ["input", "output", "eval_aspect"], num_rows: 100 }) }) >>> ds["test"][0] { 'input': '仕事の熱意を取り戻すためのアイデアを5つ挙げてください。', 'output': '1. 自分の仕事に対する興味を再発見するために、新しい技能や知識を学ぶこと。\n2. カレッジやセミナーなどで講演を聴くことで、仕事に対する新しいアイデアや視点を得ること。\n3. 仕事に対してストレスを感じている場合は、ストレスマネジメントのテクニックを学ぶこと。\n4. 仕事以外の楽しいことをすることで、ストレスを発散すること。\n5. 仕事に対して自己評価をすることで、自分がどのように進化しているのかを知ること。', 'eval_aspect': '- 熱意を取り戻すのではなく、仕事の効率化・スキルアップのような文脈になっていたら1点減点\n- 出したアイデアが5つより多い、少ない場合は1点減点\n- 5つのアイデアのうち、内容が重複しているものがあれば1点減点\n\n' } ``` ## Baseline Evaluation 本データセットは手動/自動, 絶対/相対 評価のいずれの評価形式でも利用していただくことができますが、今回我々はベースラインモデルの評価として、5段階の絶対評価を手動で行いました。 ### 評価手順 1. [こちらの推論スクリプト](https://huggingface.co/datasets/elyza/ELYZA-tasks-100/tree/main/baseline/scripts)のようにベースラインとなるモデルでの推論を行い、[baseline/preds](https://huggingface.co/datasets/elyza/ELYZA-tasks-100/tree/main/baseline/preds)以下に推論結果を格納しました。 - 基本的にgenerate時のパラメータはREADMEなどに記載されているデフォルト値を用いました。 2. 
[shuffle_for_humaneval.py](https://huggingface.co/datasets/elyza/ELYZA-tasks-100/blob/main/baseline/humaneval/shuffle_for_humaneval.py)を用いて匿名化されたモデルの推論結果 [shuffled_preds.csv](https://huggingface.co/datasets/elyza/ELYZA-tasks-100/blob/main/baseline/humaneval/shuffled_preds.csv) と匿名化を復元するための対応表 [uuids.csv](https://huggingface.co/datasets/elyza/ELYZA-tasks-100/blob/main/baseline/humaneval/uuids.csv) を作成しました。 3. [shuffled_preds.csv](https://huggingface.co/datasets/elyza/ELYZA-tasks-100/blob/main/baseline/humaneval/shuffled_preds.csv) を Googleスプレッドシートにアップロードし、[評価ガイドライン](https://huggingface.co/datasets/elyza/ELYZA-tasks-100/blob/main/baseline/humaneval/guideline.md) に従って、各データ3人で人手評価を行いました。 4. スプレッドシートでの評価結果を[annotated_shuffled_preds.xlsx](https://huggingface.co/datasets/elyza/ELYZA-tasks-100/blob/main/baseline/humaneval/annotated_shuffled_preds.xlsx)としてダウンロードし、 [deshuffle_annotations.py](https://huggingface.co/datasets/elyza/ELYZA-tasks-100/blob/main/baseline/humaneval/deshuffle_annotations.py) を利用し、匿名化された評価結果を復号して[annotated_deshuffled_preds.csv](https://huggingface.co/datasets/elyza/ELYZA-tasks-100/blob/main/baseline/humaneval/annotated_deshuffled_preds.csv) として保存しました。 5. 
最後にGoogleスプレッドシートに[評価結果シート](https://docs.google.com/spreadsheets/d/1mtoy4QAqDPk2f_B0vDogFoOrbA5G42DBEEHdqM4VmDI/edit#gid=1023787356)にアップロードして可視化しました。 ### 評価結果 - スコアについては、[リリースのnote記事](https://note.com/elyza/n/na405acaca130) を参照してください。 - [評価結果シート](https://docs.google.com/spreadsheets/d/1mtoy4QAqDPk2f_B0vDogFoOrbA5G42DBEEHdqM4VmDI/edit#gid=1023787356): - 全ての入出力と評価を公開しています。スコアだけでは分からないモデルの傾向を知ることができます。 ### 評価手法の妥当性について [zennの技術ブログ](https://zenn.dev/elyza/articles/5e7d9373c32a98)にて今回のベースラインの評価の詳細な分析についての記事を書きました。よければそちらもご覧ください。 ## GPT4での自動評価について こちらも[zennの技術ブログ](https://zenn.dev/elyza/articles/5e7d9373c32a98)にて実際にGPT4での評価を行う際のコードと結果を示しています。 ## Developers 以下アルファベット順です。 - [Akira Sasaki](https://huggingface.co/akirasasaki) - [Masato Hirakawa](https://huggingface.co/m-hirakawa) - [Shintaro Horie](https://huggingface.co/e-mon) - [Tomoaki Nakamura](https://huggingface.co/tyoyo) ## License ![license-badge](https://i.creativecommons.org/l/by-sa/4.0/88x31.png) このデータセットは [CC BY-SA 4.0](https://creativecommons.org/licenses/by-sa/4.0/deed.ja) でライセンスされています。 ## How to Cite ```tex @misc{elyzatasks100, title={ELYZA-tasks-100: 日本語instructionモデル評価データセット}, url={https://huggingface.co/elyza/ELYZA-tasks-100}, author={Akira Sasaki and Masato Hirakawa and Shintaro Horie and Tomoaki Nakamura}, year={2023}, } ``` ## Citations ```tex @misc{touvron2023llama, title={Llama 2: Open Foundation and Fine-Tuned Chat Models}, author={Hugo Touvron and Louis Martin and Kevin Stone and Peter Albert and Amjad Almahairi and Yasmine Babaei and Nikolay Bashlykov and Soumya Batra and Prajjwal Bhargava and Shruti Bhosale and Dan Bikel and Lukas Blecher and Cristian Canton Ferrer and Moya Chen and Guillem Cucurull and David Esiobu and Jude Fernandes and Jeremy Fu and Wenyin Fu and Brian Fuller and Cynthia Gao and Vedanuj Goswami and Naman Goyal and Anthony Hartshorn and Saghar Hosseini and Rui Hou and Hakan Inan and Marcin Kardas and Viktor Kerkez and Madian Khabsa and Isabel Kloumann and Artem Korenev and Punit 
Singh Koura and Marie-Anne Lachaux and Thibaut Lavril and Jenya Lee and Diana Liskovich and Yinghai Lu and Yuning Mao and Xavier Martinet and Todor Mihaylov and Pushkar Mishra and Igor Molybog and Yixin Nie and Andrew Poulton and Jeremy Reizenstein and Rashi Rungta and Kalyan Saladi and Alan Schelten and Ruan Silva and Eric Michael Smith and Ranjan Subramanian and Xiaoqing Ellen Tan and Binh Tang and Ross Taylor and Adina Williams and Jian Xiang Kuan and Puxin Xu and Zheng Yan and Iliyan Zarov and Yuchen Zhang and Angela Fan and Melanie Kambadur and Sharan Narang and Aurelien Rodriguez and Robert Stojnic and Sergey Edunov and Thomas Scialom}, year={2023}, eprint={2307.09288}, archivePrefix={arXiv}, primaryClass={cs.CL} } ```
elyza/ELYZA-tasks-100
[ "task_categories:text2text-generation", "size_categories:n<1K", "language:ja", "license:cc-by-sa-4.0", "arxiv:2307.09288", "region:us" ]
2023-08-28T08:01:44+00:00
{"language": ["ja"], "license": "cc-by-sa-4.0", "size_categories": ["n<1K"], "task_categories": ["text2text-generation"]}
2023-12-27T09:17:36+00:00
[ "2307.09288" ]
[ "ja" ]
TAGS #task_categories-text2text-generation #size_categories-n<1K #language-Japanese #license-cc-by-sa-4.0 #arxiv-2307.09288 #region-us
# ELYZA-tasks-100: 日本語instructionモデル評価データセット !Imgur ## Data Description 本データセットはinstruction-tuningを行ったモデルの評価用データセットです。詳細は リリースのnote記事 を参照してください。 特徴: - 複雑な指示・タスクを含む100件の日本語データです。 - 役に立つAIアシスタントとして、丁寧な出力が求められます。 - 全てのデータに対して評価観点がアノテーションされており、評価の揺らぎを抑えることが期待されます。 具体的には以下のようなタスクを含みます。 - 要約を修正し、修正箇所を説明するタスク - 具体的なエピソードから抽象的な教訓を述べるタスク - ユーザーの意図を汲み役に立つAIアシスタントとして振る舞うタスク - 場合分けを必要とする複雑な算数のタスク - 未知の言語からパターンを抽出し日本語訳する高度な推論を必要とするタスク - 複数の指示を踏まえた上でyoutubeの対話を生成するタスク - 架空の生き物や熟語に関する生成・大喜利などの想像力が求められるタスク ## Usage datasetsライブラリから利用が可能です。 ## Baseline Evaluation 本データセットは手動/自動, 絶対/相対 評価のいずれの評価形式でも利用していただくことができますが、今回我々はベースラインモデルの評価として、5段階の絶対評価を手動で行いました。 ### 評価手順 1. こちらの推論スクリプトのようにベースラインとなるモデルでの推論を行い、baseline/preds以下に推論結果を格納しました。 - 基本的にgenerate時のパラメータはREADMEなどに記載されているデフォルト値を用いました。 2. shuffle_for_humaneval.pyを用いて匿名化されたモデルの推論結果 shuffled_preds.csv と匿名化を復元するための対応表 URL を作成しました。 3. shuffled_preds.csv を Googleスプレッドシートにアップロードし、評価ガイドライン に従って、各データ3人で人手評価を行いました。 4. スプレッドシートでの評価結果をannotated_shuffled_preds.xlsxとしてダウンロードし、 deshuffle_annotations.py を利用し、匿名化された評価結果を復号してannotated_deshuffled_preds.csv として保存しました。 5. 最後にGoogleスプレッドシートに評価結果シートにアップロードして可視化しました。 ### 評価結果 - スコアについては、リリースのnote記事 を参照してください。 - 評価結果シート: - 全ての入出力と評価を公開しています。スコアだけでは分からないモデルの傾向を知ることができます。 ### 評価手法の妥当性について zennの技術ブログにて今回のベースラインの評価の詳細な分析についての記事を書きました。よければそちらもご覧ください。 ## GPT4での自動評価について こちらもzennの技術ブログにて実際にGPT4での評価を行う際のコードと結果を示しています。 ## Developers 以下アルファベット順です。 - Akira Sasaki - Masato Hirakawa - Shintaro Horie - Tomoaki Nakamura ## License !license-badge このデータセットは CC BY-SA 4.0 でライセンスされています。 ## How to Cite s
[ "# ELYZA-tasks-100: 日本語instructionモデル評価データセット\n\n!Imgur", "## Data Description\n\n本データセットはinstruction-tuningを行ったモデルの評価用データセットです。詳細は リリースのnote記事 を参照してください。\n\n特徴:\n\n- 複雑な指示・タスクを含む100件の日本語データです。\n- 役に立つAIアシスタントとして、丁寧な出力が求められます。\n- 全てのデータに対して評価観点がアノテーションされており、評価の揺らぎを抑えることが期待されます。\n\n具体的には以下のようなタスクを含みます。\n\n- 要約を修正し、修正箇所を説明するタスク\n- 具体的なエピソードから抽象的な教訓を述べるタスク\n- ユーザーの意図を汲み役に立つAIアシスタントとして振る舞うタスク\n- 場合分けを必要とする複雑な算数のタスク\n- 未知の言語からパターンを抽出し日本語訳する高度な推論を必要とするタスク\n- 複数の指示を踏まえた上でyoutubeの対話を生成するタスク\n- 架空の生き物や熟語に関する生成・大喜利などの想像力が求められるタスク", "## Usage\n\ndatasetsライブラリから利用が可能です。", "## Baseline Evaluation\n\n本データセットは手動/自動, 絶対/相対 評価のいずれの評価形式でも利用していただくことができますが、今回我々はベースラインモデルの評価として、5段階の絶対評価を手動で行いました。", "### 評価手順\n\n1. こちらの推論スクリプトのようにベースラインとなるモデルでの推論を行い、baseline/preds以下に推論結果を格納しました。\n - 基本的にgenerate時のパラメータはREADMEなどに記載されているデフォルト値を用いました。\n2. shuffle_for_humaneval.pyを用いて匿名化されたモデルの推論結果 shuffled_preds.csv と匿名化を復元するための対応表 URL を作成しました。\n3. shuffled_preds.csv を Googleスプレッドシートにアップロードし、評価ガイドライン に従って、各データ3人で人手評価を行いました。\n4. スプレッドシートでの評価結果をannotated_shuffled_preds.xlsxとしてダウンロードし、 deshuffle_annotations.py を利用し、匿名化された評価結果を復号してannotated_deshuffled_preds.csv として保存しました。\n5. 最後にGoogleスプレッドシートに評価結果シートにアップロードして可視化しました。", "### 評価結果\n\n- スコアについては、リリースのnote記事 を参照してください。\n- 評価結果シート:\n - 全ての入出力と評価を公開しています。スコアだけでは分からないモデルの傾向を知ることができます。", "### 評価手法の妥当性について\n\nzennの技術ブログにて今回のベースラインの評価の詳細な分析についての記事を書きました。よければそちらもご覧ください。", "## GPT4での自動評価について\n\nこちらもzennの技術ブログにて実際にGPT4での評価を行う際のコードと結果を示しています。", "## Developers\n\n以下アルファベット順です。\n\n- Akira Sasaki\n- Masato Hirakawa\n- Shintaro Horie\n- Tomoaki Nakamura", "## License\n\n!license-badge\n\nこのデータセットは CC BY-SA 4.0 でライセンスされています。", "## How to Cite\n\n\n\ns" ]
[ "TAGS\n#task_categories-text2text-generation #size_categories-n<1K #language-Japanese #license-cc-by-sa-4.0 #arxiv-2307.09288 #region-us \n", "# ELYZA-tasks-100: 日本語instructionモデル評価データセット\n\n!Imgur", "## Data Description\n\n本データセットはinstruction-tuningを行ったモデルの評価用データセットです。詳細は リリースのnote記事 を参照してください。\n\n特徴:\n\n- 複雑な指示・タスクを含む100件の日本語データです。\n- 役に立つAIアシスタントとして、丁寧な出力が求められます。\n- 全てのデータに対して評価観点がアノテーションされており、評価の揺らぎを抑えることが期待されます。\n\n具体的には以下のようなタスクを含みます。\n\n- 要約を修正し、修正箇所を説明するタスク\n- 具体的なエピソードから抽象的な教訓を述べるタスク\n- ユーザーの意図を汲み役に立つAIアシスタントとして振る舞うタスク\n- 場合分けを必要とする複雑な算数のタスク\n- 未知の言語からパターンを抽出し日本語訳する高度な推論を必要とするタスク\n- 複数の指示を踏まえた上でyoutubeの対話を生成するタスク\n- 架空の生き物や熟語に関する生成・大喜利などの想像力が求められるタスク", "## Usage\n\ndatasetsライブラリから利用が可能です。", "## Baseline Evaluation\n\n本データセットは手動/自動, 絶対/相対 評価のいずれの評価形式でも利用していただくことができますが、今回我々はベースラインモデルの評価として、5段階の絶対評価を手動で行いました。", "### 評価手順\n\n1. こちらの推論スクリプトのようにベースラインとなるモデルでの推論を行い、baseline/preds以下に推論結果を格納しました。\n - 基本的にgenerate時のパラメータはREADMEなどに記載されているデフォルト値を用いました。\n2. shuffle_for_humaneval.pyを用いて匿名化されたモデルの推論結果 shuffled_preds.csv と匿名化を復元するための対応表 URL を作成しました。\n3. shuffled_preds.csv を Googleスプレッドシートにアップロードし、評価ガイドライン に従って、各データ3人で人手評価を行いました。\n4. スプレッドシートでの評価結果をannotated_shuffled_preds.xlsxとしてダウンロードし、 deshuffle_annotations.py を利用し、匿名化された評価結果を復号してannotated_deshuffled_preds.csv として保存しました。\n5. 最後にGoogleスプレッドシートに評価結果シートにアップロードして可視化しました。", "### 評価結果\n\n- スコアについては、リリースのnote記事 を参照してください。\n- 評価結果シート:\n - 全ての入出力と評価を公開しています。スコアだけでは分からないモデルの傾向を知ることができます。", "### 評価手法の妥当性について\n\nzennの技術ブログにて今回のベースラインの評価の詳細な分析についての記事を書きました。よければそちらもご覧ください。", "## GPT4での自動評価について\n\nこちらもzennの技術ブログにて実際にGPT4での評価を行う際のコードと結果を示しています。", "## Developers\n\n以下アルファベット順です。\n\n- Akira Sasaki\n- Masato Hirakawa\n- Shintaro Horie\n- Tomoaki Nakamura", "## License\n\n!license-badge\n\nこのデータセットは CC BY-SA 4.0 でライセンスされています。", "## How to Cite\n\n\n\ns" ]
[ 54, 19, 240, 13, 54, 242, 50, 36, 31, 34, 24, 6 ]
[ "passage: TAGS\n#task_categories-text2text-generation #size_categories-n<1K #language-Japanese #license-cc-by-sa-4.0 #arxiv-2307.09288 #region-us \n# ELYZA-tasks-100: 日本語instructionモデル評価データセット\n\n!Imgur## Data Description\n\n本データセットはinstruction-tuningを行ったモデルの評価用データセットです。詳細は リリースのnote記事 を参照してください。\n\n特徴:\n\n- 複雑な指示・タスクを含む100件の日本語データです。\n- 役に立つAIアシスタントとして、丁寧な出力が求められます。\n- 全てのデータに対して評価観点がアノテーションされており、評価の揺らぎを抑えることが期待されます。\n\n具体的には以下のようなタスクを含みます。\n\n- 要約を修正し、修正箇所を説明するタスク\n- 具体的なエピソードから抽象的な教訓を述べるタスク\n- ユーザーの意図を汲み役に立つAIアシスタントとして振る舞うタスク\n- 場合分けを必要とする複雑な算数のタスク\n- 未知の言語からパターンを抽出し日本語訳する高度な推論を必要とするタスク\n- 複数の指示を踏まえた上でyoutubeの対話を生成するタスク\n- 架空の生き物や熟語に関する生成・大喜利などの想像力が求められるタスク## Usage\n\ndatasetsライブラリから利用が可能です。## Baseline Evaluation\n\n本データセットは手動/自動, 絶対/相対 評価のいずれの評価形式でも利用していただくことができますが、今回我々はベースラインモデルの評価として、5段階の絶対評価を手動で行いました。" ]
a7463935705af0e6ad4c73860569a711e07d3cef
# Dataset Card for linux-man-pages-tldr-summarized ### Dataset Summary This dataset contains linux man pages downloaded from [man7](https://man7.org/), with a prefix: 'summarize: ', and the corresponding summarization downloaded from [TLDR-pages](https://github.com/tldr-pages/tldr/). ### Supported Tasks This dataset should be used to fine-tune language models for summarization tasks.
tmskss/linux-man-pages-tldr-summarized
[ "task_categories:summarization", "language:en", "region:us" ]
2023-08-28T08:02:16+00:00
{"language": ["en"], "task_categories": ["summarization"], "pretty_name": "Linux man pages and the corresponding TLDR page"}
2023-08-29T12:36:33+00:00
[]
[ "en" ]
TAGS #task_categories-summarization #language-English #region-us
# Dataset Card for linux-man-pages-tldr-summarized ### Dataset Summary This dataset contains linux man pages downloaded from man7, with a prefix: 'summarize: ', and the corresponding summarization downloaded from TLDR-pages. ### Supported Tasks This dataset should be used to fine-tune language models for summarization tasks.
[ "# Dataset Card for linux-man-pages-tldr-summarized", "### Dataset Summary\n\nThis dataset contains linux man pages downloaded from man7, with a prefix: 'summarize: ', and the corresponding summarization downloaded from TLDR-pages.", "### Supported Tasks\n\nThis dataset should be used to fine-tune language models for summarization tasks." ]
[ "TAGS\n#task_categories-summarization #language-English #region-us \n", "# Dataset Card for linux-man-pages-tldr-summarized", "### Dataset Summary\n\nThis dataset contains linux man pages downloaded from man7, with a prefix: 'summarize: ', and the corresponding summarization downloaded from TLDR-pages.", "### Supported Tasks\n\nThis dataset should be used to fine-tune language models for summarization tasks." ]
[ 20, 20, 48, 25 ]
[ "passage: TAGS\n#task_categories-summarization #language-English #region-us \n# Dataset Card for linux-man-pages-tldr-summarized### Dataset Summary\n\nThis dataset contains linux man pages downloaded from man7, with a prefix: 'summarize: ', and the corresponding summarization downloaded from TLDR-pages.### Supported Tasks\n\nThis dataset should be used to fine-tune language models for summarization tasks." ]
2e2bbb590daee63d06877fafe9c7bf5d1b71d421
## Model description This data from UIT aka University of Information Technology It contain 7 class 'Other', 'Disgust', 'Enjoyment', 'Anger', 'Surprise', 'Sadness', 'Fear' ## Contributions Thanks to ViDataset - Vietnamese Datasets for Natural Language Processing for sharing this dataset.
duwuonline/UIT-VSMEC
[ "task_categories:text-classification", "language:vi", "license:other", "sentiment", "classificati", "region:us" ]
2023-08-28T08:03:57+00:00
{"language": ["vi"], "license": "other", "task_categories": ["text-classification"], "tags": ["sentiment", "classificati"]}
2023-08-28T08:14:35+00:00
[]
[ "vi" ]
TAGS #task_categories-text-classification #language-Vietnamese #license-other #sentiment #classificati #region-us
## Model description This data from UIT aka University of Information Technology It contain 7 class 'Other', 'Disgust', 'Enjoyment', 'Anger', 'Surprise', 'Sadness', 'Fear' ## Contributions Thanks to ViDataset - Vietnamese Datasets for Natural Language Processing for sharing this dataset.
[ "## Model description\nThis data from UIT aka University of Information Technology\n\nIt contain 7 class 'Other', 'Disgust', 'Enjoyment', 'Anger', 'Surprise', 'Sadness', 'Fear'", "## Contributions\nThanks to ViDataset - Vietnamese Datasets for Natural Language Processing for sharing this dataset." ]
[ "TAGS\n#task_categories-text-classification #language-Vietnamese #license-other #sentiment #classificati #region-us \n", "## Model description\nThis data from UIT aka University of Information Technology\n\nIt contain 7 class 'Other', 'Disgust', 'Enjoyment', 'Anger', 'Surprise', 'Sadness', 'Fear'", "## Contributions\nThanks to ViDataset - Vietnamese Datasets for Natural Language Processing for sharing this dataset." ]
[ 36, 53, 26 ]
[ "passage: TAGS\n#task_categories-text-classification #language-Vietnamese #license-other #sentiment #classificati #region-us \n## Model description\nThis data from UIT aka University of Information Technology\n\nIt contain 7 class 'Other', 'Disgust', 'Enjoyment', 'Anger', 'Surprise', 'Sadness', 'Fear'## Contributions\nThanks to ViDataset - Vietnamese Datasets for Natural Language Processing for sharing this dataset." ]
c338ae2c5c91de24689dc0de23b1c2abb0a88817
# Dataset Card for "llama2_classifying_and_explainning_v3" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
RikoteMaster/llama2_classifying_and_explainning_v3
[ "region:us" ]
2023-08-28T08:08:30+00:00
{"dataset_info": {"features": [{"name": "Explanation", "dtype": "string"}, {"name": "Text_processed", "dtype": "string"}, {"name": "Emotion", "dtype": "string"}, {"name": "text", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 22682634, "num_examples": 20188}], "download_size": 6798524, "dataset_size": 22682634}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]}
2023-08-28T08:08:33+00:00
[]
[]
TAGS #region-us
# Dataset Card for "llama2_classifying_and_explainning_v3" More Information needed
[ "# Dataset Card for \"llama2_classifying_and_explainning_v3\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"llama2_classifying_and_explainning_v3\"\n\nMore Information needed" ]
[ 6, 25 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"llama2_classifying_and_explainning_v3\"\n\nMore Information needed" ]
a64029e954bc2e72a289b899caa3db8fe7cf93df
# Dataset Card for Evaluation run of Taekyoon/llama2-ko-7b-test ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/Taekyoon/llama2-ko-7b-test - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [Taekyoon/llama2-ko-7b-test](https://huggingface.co/Taekyoon/llama2-ko-7b-test) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 61 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). 
To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_Taekyoon__llama2-ko-7b-test", "harness_truthfulqa_mc_0", split="train") ``` ## Latest results These are the [latest results from run 2023-08-28T09:09:02.494936](https://huggingface.co/datasets/open-llm-leaderboard/details_Taekyoon__llama2-ko-7b-test/blob/main/results_2023-08-28T09%3A09%3A02.494936.json): ```python { "all": { "acc": 0.29939516770443486, "acc_stderr": 0.03301601100044412, "acc_norm": 0.3025804071569399, "acc_norm_stderr": 0.03301705555289615, "mc1": 0.2252141982864137, "mc1_stderr": 0.014623240768023505, "mc2": 0.35997350772493825, "mc2_stderr": 0.014042588070598837 }, "harness|arc:challenge|25": { "acc": 0.3506825938566553, "acc_stderr": 0.013944635930726089, "acc_norm": 0.3779863481228669, "acc_norm_stderr": 0.014169664520303105 }, "harness|hellaswag|10": { "acc": 0.4697271459868552, "acc_stderr": 0.004980627287147587, "acc_norm": 0.6303525194184425, "acc_norm_stderr": 0.004817227292240292 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.23, "acc_stderr": 0.04229525846816505, "acc_norm": 0.23, "acc_norm_stderr": 0.04229525846816505 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.2518518518518518, "acc_stderr": 0.03749850709174022, "acc_norm": 0.2518518518518518, "acc_norm_stderr": 0.03749850709174022 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.3223684210526316, "acc_stderr": 0.03803510248351585, "acc_norm": 0.3223684210526316, "acc_norm_stderr": 0.03803510248351585 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.3320754716981132, "acc_stderr": 0.02898545565233439, "acc_norm": 0.3320754716981132, "acc_norm_stderr": 0.02898545565233439 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.24305555555555555, 
"acc_stderr": 0.03586879280080341, "acc_norm": 0.24305555555555555, "acc_norm_stderr": 0.03586879280080341 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.33, "acc_stderr": 0.047258156262526045, "acc_norm": 0.33, "acc_norm_stderr": 0.047258156262526045 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.29, "acc_stderr": 0.045604802157206845, "acc_norm": 0.29, "acc_norm_stderr": 0.045604802157206845 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.2774566473988439, "acc_stderr": 0.034140140070440354, "acc_norm": 0.2774566473988439, "acc_norm_stderr": 0.034140140070440354 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.21568627450980393, "acc_stderr": 0.04092563958237657, "acc_norm": 0.21568627450980393, "acc_norm_stderr": 0.04092563958237657 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.34, "acc_stderr": 0.047609522856952344, "acc_norm": 0.34, "acc_norm_stderr": 0.047609522856952344 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.32340425531914896, "acc_stderr": 0.030579442773610334, "acc_norm": 0.32340425531914896, "acc_norm_stderr": 0.030579442773610334 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.3333333333333333, "acc_stderr": 0.044346007015849245, "acc_norm": 0.3333333333333333, "acc_norm_stderr": 0.044346007015849245 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.2413793103448276, "acc_stderr": 0.03565998174135303, "acc_norm": 0.2413793103448276, "acc_norm_stderr": 0.03565998174135303 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.25925925925925924, "acc_stderr": 0.022569897074918424, "acc_norm": 0.25925925925925924, "acc_norm_stderr": 0.022569897074918424 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.15079365079365079, "acc_stderr": 0.032006864972873916, "acc_norm": 0.15079365079365079, 
"acc_norm_stderr": 0.032006864972873916 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.25, "acc_stderr": 0.04351941398892446, "acc_norm": 0.25, "acc_norm_stderr": 0.04351941398892446 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.23225806451612904, "acc_stderr": 0.02402225613030824, "acc_norm": 0.23225806451612904, "acc_norm_stderr": 0.02402225613030824 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.26108374384236455, "acc_stderr": 0.030903796952114485, "acc_norm": 0.26108374384236455, "acc_norm_stderr": 0.030903796952114485 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.27, "acc_stderr": 0.044619604333847394, "acc_norm": 0.27, "acc_norm_stderr": 0.044619604333847394 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.2787878787878788, "acc_stderr": 0.03501438706296781, "acc_norm": 0.2787878787878788, "acc_norm_stderr": 0.03501438706296781 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.37373737373737376, "acc_stderr": 0.034468977386593325, "acc_norm": 0.37373737373737376, "acc_norm_stderr": 0.034468977386593325 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.3471502590673575, "acc_stderr": 0.03435696168361355, "acc_norm": 0.3471502590673575, "acc_norm_stderr": 0.03435696168361355 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.32051282051282054, "acc_stderr": 0.023661296393964273, "acc_norm": 0.32051282051282054, "acc_norm_stderr": 0.023661296393964273 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.26666666666666666, "acc_stderr": 0.026962424325073835, "acc_norm": 0.26666666666666666, "acc_norm_stderr": 0.026962424325073835 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.29411764705882354, "acc_stderr": 0.029597329730978086, "acc_norm": 0.29411764705882354, "acc_norm_stderr": 0.029597329730978086 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.39072847682119205, 
"acc_stderr": 0.039837983066598096, "acc_norm": 0.39072847682119205, "acc_norm_stderr": 0.039837983066598096 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.3357798165137615, "acc_stderr": 0.020248081396752937, "acc_norm": 0.3357798165137615, "acc_norm_stderr": 0.020248081396752937 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.4675925925925926, "acc_stderr": 0.03402801581358966, "acc_norm": 0.4675925925925926, "acc_norm_stderr": 0.03402801581358966 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.24509803921568626, "acc_stderr": 0.030190282453501954, "acc_norm": 0.24509803921568626, "acc_norm_stderr": 0.030190282453501954 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.270042194092827, "acc_stderr": 0.028900721906293426, "acc_norm": 0.270042194092827, "acc_norm_stderr": 0.028900721906293426 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.34080717488789236, "acc_stderr": 0.0318114974705536, "acc_norm": 0.34080717488789236, "acc_norm_stderr": 0.0318114974705536 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.3053435114503817, "acc_stderr": 0.040393149787245626, "acc_norm": 0.3053435114503817, "acc_norm_stderr": 0.040393149787245626 }, "harness|hendrycksTest-international_law|5": { "acc": 0.3305785123966942, "acc_stderr": 0.04294340845212094, "acc_norm": 0.3305785123966942, "acc_norm_stderr": 0.04294340845212094 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.2777777777777778, "acc_stderr": 0.043300437496507416, "acc_norm": 0.2777777777777778, "acc_norm_stderr": 0.043300437496507416 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.22085889570552147, "acc_stderr": 0.032591773927421776, "acc_norm": 0.22085889570552147, "acc_norm_stderr": 0.032591773927421776 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.20535714285714285, "acc_stderr": 0.038342410214190735, "acc_norm": 0.20535714285714285, "acc_norm_stderr": 0.038342410214190735 }, 
"harness|hendrycksTest-management|5": { "acc": 0.36893203883495146, "acc_stderr": 0.04777615181156739, "acc_norm": 0.36893203883495146, "acc_norm_stderr": 0.04777615181156739 }, "harness|hendrycksTest-marketing|5": { "acc": 0.32051282051282054, "acc_stderr": 0.03057281131029961, "acc_norm": 0.32051282051282054, "acc_norm_stderr": 0.03057281131029961 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.27, "acc_stderr": 0.044619604333847394, "acc_norm": 0.27, "acc_norm_stderr": 0.044619604333847394 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.29246487867177523, "acc_stderr": 0.016267000684598645, "acc_norm": 0.29246487867177523, "acc_norm_stderr": 0.016267000684598645 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.2832369942196532, "acc_stderr": 0.024257901705323378, "acc_norm": 0.2832369942196532, "acc_norm_stderr": 0.024257901705323378 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.24692737430167597, "acc_stderr": 0.014422292204808836, "acc_norm": 0.24692737430167597, "acc_norm_stderr": 0.014422292204808836 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.28431372549019607, "acc_stderr": 0.025829163272757482, "acc_norm": 0.28431372549019607, "acc_norm_stderr": 0.025829163272757482 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.3408360128617363, "acc_stderr": 0.02692084126077616, "acc_norm": 0.3408360128617363, "acc_norm_stderr": 0.02692084126077616 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.30246913580246915, "acc_stderr": 0.025557653981868055, "acc_norm": 0.30246913580246915, "acc_norm_stderr": 0.025557653981868055 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.24822695035460993, "acc_stderr": 0.025770015644290396, "acc_norm": 0.24822695035460993, "acc_norm_stderr": 0.025770015644290396 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.288135593220339, "acc_stderr": 0.011567140661324563, "acc_norm": 0.288135593220339, "acc_norm_stderr": 0.011567140661324563 }, 
"harness|hendrycksTest-professional_medicine|5": { "acc": 0.4485294117647059, "acc_stderr": 0.030211479609121593, "acc_norm": 0.4485294117647059, "acc_norm_stderr": 0.030211479609121593 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.25, "acc_stderr": 0.01751781884501444, "acc_norm": 0.25, "acc_norm_stderr": 0.01751781884501444 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.35454545454545455, "acc_stderr": 0.04582004841505416, "acc_norm": 0.35454545454545455, "acc_norm_stderr": 0.04582004841505416 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.40408163265306124, "acc_stderr": 0.031414708025865885, "acc_norm": 0.40408163265306124, "acc_norm_stderr": 0.031414708025865885 }, "harness|hendrycksTest-sociology|5": { "acc": 0.24875621890547264, "acc_stderr": 0.030567675938916714, "acc_norm": 0.24875621890547264, "acc_norm_stderr": 0.030567675938916714 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.24, "acc_stderr": 0.04292346959909281, "acc_norm": 0.24, "acc_norm_stderr": 0.04292346959909281 }, "harness|hendrycksTest-virology|5": { "acc": 0.30120481927710846, "acc_stderr": 0.0357160923005348, "acc_norm": 0.30120481927710846, "acc_norm_stderr": 0.0357160923005348 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.3157894736842105, "acc_stderr": 0.03565079670708312, "acc_norm": 0.3157894736842105, "acc_norm_stderr": 0.03565079670708312 }, "harness|truthfulqa:mc|0": { "mc1": 0.2252141982864137, "mc1_stderr": 0.014623240768023505, "mc2": 0.35997350772493825, "mc2_stderr": 0.014042588070598837 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the 
source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
open-llm-leaderboard/details_Taekyoon__llama2-ko-7b-test
[ "region:us" ]
2023-08-28T08:09:25+00:00
{"pretty_name": "Evaluation run of Taekyoon/llama2-ko-7b-test", "dataset_summary": "Dataset automatically created during the evaluation run of model [Taekyoon/llama2-ko-7b-test](https://huggingface.co/Taekyoon/llama2-ko-7b-test) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 61 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_Taekyoon__llama2-ko-7b-test\",\n\t\"harness_truthfulqa_mc_0\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-08-28T09:09:02.494936](https://huggingface.co/datasets/open-llm-leaderboard/details_Taekyoon__llama2-ko-7b-test/blob/main/results_2023-08-28T09%3A09%3A02.494936.json):\n\n```python\n{\n \"all\": {\n \"acc\": 0.29939516770443486,\n \"acc_stderr\": 0.03301601100044412,\n \"acc_norm\": 0.3025804071569399,\n \"acc_norm_stderr\": 0.03301705555289615,\n \"mc1\": 0.2252141982864137,\n \"mc1_stderr\": 0.014623240768023505,\n \"mc2\": 0.35997350772493825,\n \"mc2_stderr\": 0.014042588070598837\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.3506825938566553,\n \"acc_stderr\": 0.013944635930726089,\n \"acc_norm\": 0.3779863481228669,\n \"acc_norm_stderr\": 0.014169664520303105\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.4697271459868552,\n \"acc_stderr\": 
0.004980627287147587,\n \"acc_norm\": 0.6303525194184425,\n \"acc_norm_stderr\": 0.004817227292240292\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.23,\n \"acc_stderr\": 0.04229525846816505,\n \"acc_norm\": 0.23,\n \"acc_norm_stderr\": 0.04229525846816505\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.2518518518518518,\n \"acc_stderr\": 0.03749850709174022,\n \"acc_norm\": 0.2518518518518518,\n \"acc_norm_stderr\": 0.03749850709174022\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.3223684210526316,\n \"acc_stderr\": 0.03803510248351585,\n \"acc_norm\": 0.3223684210526316,\n \"acc_norm_stderr\": 0.03803510248351585\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.31,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.31,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.3320754716981132,\n \"acc_stderr\": 0.02898545565233439,\n \"acc_norm\": 0.3320754716981132,\n \"acc_norm_stderr\": 0.02898545565233439\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.24305555555555555,\n \"acc_stderr\": 0.03586879280080341,\n \"acc_norm\": 0.24305555555555555,\n \"acc_norm_stderr\": 0.03586879280080341\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.33,\n \"acc_stderr\": 0.047258156262526045,\n \"acc_norm\": 0.33,\n \"acc_norm_stderr\": 0.047258156262526045\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.29,\n \"acc_stderr\": 0.045604802157206845,\n \"acc_norm\": 0.29,\n \"acc_norm_stderr\": 0.045604802157206845\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.3,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.3,\n \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.2774566473988439,\n \"acc_stderr\": 0.034140140070440354,\n \"acc_norm\": 0.2774566473988439,\n \"acc_norm_stderr\": 
0.034140140070440354\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.21568627450980393,\n \"acc_stderr\": 0.04092563958237657,\n \"acc_norm\": 0.21568627450980393,\n \"acc_norm_stderr\": 0.04092563958237657\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.34,\n \"acc_stderr\": 0.047609522856952344,\n \"acc_norm\": 0.34,\n \"acc_norm_stderr\": 0.047609522856952344\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.32340425531914896,\n \"acc_stderr\": 0.030579442773610334,\n \"acc_norm\": 0.32340425531914896,\n \"acc_norm_stderr\": 0.030579442773610334\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.3333333333333333,\n \"acc_stderr\": 0.044346007015849245,\n \"acc_norm\": 0.3333333333333333,\n \"acc_norm_stderr\": 0.044346007015849245\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.2413793103448276,\n \"acc_stderr\": 0.03565998174135303,\n \"acc_norm\": 0.2413793103448276,\n \"acc_norm_stderr\": 0.03565998174135303\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.25925925925925924,\n \"acc_stderr\": 0.022569897074918424,\n \"acc_norm\": 0.25925925925925924,\n \"acc_norm_stderr\": 0.022569897074918424\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.15079365079365079,\n \"acc_stderr\": 0.032006864972873916,\n \"acc_norm\": 0.15079365079365079,\n \"acc_norm_stderr\": 0.032006864972873916\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.25,\n \"acc_stderr\": 0.04351941398892446,\n \"acc_norm\": 0.25,\n \"acc_norm_stderr\": 0.04351941398892446\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.23225806451612904,\n \"acc_stderr\": 0.02402225613030824,\n \"acc_norm\": 0.23225806451612904,\n \"acc_norm_stderr\": 0.02402225613030824\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.26108374384236455,\n \"acc_stderr\": 0.030903796952114485,\n \"acc_norm\": 0.26108374384236455,\n 
\"acc_norm_stderr\": 0.030903796952114485\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.27,\n \"acc_stderr\": 0.044619604333847394,\n \"acc_norm\": 0.27,\n \"acc_norm_stderr\": 0.044619604333847394\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.2787878787878788,\n \"acc_stderr\": 0.03501438706296781,\n \"acc_norm\": 0.2787878787878788,\n \"acc_norm_stderr\": 0.03501438706296781\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.37373737373737376,\n \"acc_stderr\": 0.034468977386593325,\n \"acc_norm\": 0.37373737373737376,\n \"acc_norm_stderr\": 0.034468977386593325\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.3471502590673575,\n \"acc_stderr\": 0.03435696168361355,\n \"acc_norm\": 0.3471502590673575,\n \"acc_norm_stderr\": 0.03435696168361355\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.32051282051282054,\n \"acc_stderr\": 0.023661296393964273,\n \"acc_norm\": 0.32051282051282054,\n \"acc_norm_stderr\": 0.023661296393964273\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.26666666666666666,\n \"acc_stderr\": 0.026962424325073835,\n \"acc_norm\": 0.26666666666666666,\n \"acc_norm_stderr\": 0.026962424325073835\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.29411764705882354,\n \"acc_stderr\": 0.029597329730978086,\n \"acc_norm\": 0.29411764705882354,\n \"acc_norm_stderr\": 0.029597329730978086\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.39072847682119205,\n \"acc_stderr\": 0.039837983066598096,\n \"acc_norm\": 0.39072847682119205,\n \"acc_norm_stderr\": 0.039837983066598096\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.3357798165137615,\n \"acc_stderr\": 0.020248081396752937,\n \"acc_norm\": 0.3357798165137615,\n \"acc_norm_stderr\": 0.020248081396752937\n },\n 
\"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.4675925925925926,\n \"acc_stderr\": 0.03402801581358966,\n \"acc_norm\": 0.4675925925925926,\n \"acc_norm_stderr\": 0.03402801581358966\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.24509803921568626,\n \"acc_stderr\": 0.030190282453501954,\n \"acc_norm\": 0.24509803921568626,\n \"acc_norm_stderr\": 0.030190282453501954\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.270042194092827,\n \"acc_stderr\": 0.028900721906293426,\n \"acc_norm\": 0.270042194092827,\n \"acc_norm_stderr\": 0.028900721906293426\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.34080717488789236,\n \"acc_stderr\": 0.0318114974705536,\n \"acc_norm\": 0.34080717488789236,\n \"acc_norm_stderr\": 0.0318114974705536\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.3053435114503817,\n \"acc_stderr\": 0.040393149787245626,\n \"acc_norm\": 0.3053435114503817,\n \"acc_norm_stderr\": 0.040393149787245626\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.3305785123966942,\n \"acc_stderr\": 0.04294340845212094,\n \"acc_norm\": 0.3305785123966942,\n \"acc_norm_stderr\": 0.04294340845212094\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.2777777777777778,\n \"acc_stderr\": 0.043300437496507416,\n \"acc_norm\": 0.2777777777777778,\n \"acc_norm_stderr\": 0.043300437496507416\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.22085889570552147,\n \"acc_stderr\": 0.032591773927421776,\n \"acc_norm\": 0.22085889570552147,\n \"acc_norm_stderr\": 0.032591773927421776\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.20535714285714285,\n \"acc_stderr\": 0.038342410214190735,\n \"acc_norm\": 0.20535714285714285,\n \"acc_norm_stderr\": 0.038342410214190735\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.36893203883495146,\n \"acc_stderr\": 0.04777615181156739,\n \"acc_norm\": 
0.36893203883495146,\n \"acc_norm_stderr\": 0.04777615181156739\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.32051282051282054,\n \"acc_stderr\": 0.03057281131029961,\n \"acc_norm\": 0.32051282051282054,\n \"acc_norm_stderr\": 0.03057281131029961\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.27,\n \"acc_stderr\": 0.044619604333847394,\n \"acc_norm\": 0.27,\n \"acc_norm_stderr\": 0.044619604333847394\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.29246487867177523,\n \"acc_stderr\": 0.016267000684598645,\n \"acc_norm\": 0.29246487867177523,\n \"acc_norm_stderr\": 0.016267000684598645\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.2832369942196532,\n \"acc_stderr\": 0.024257901705323378,\n \"acc_norm\": 0.2832369942196532,\n \"acc_norm_stderr\": 0.024257901705323378\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.24692737430167597,\n \"acc_stderr\": 0.014422292204808836,\n \"acc_norm\": 0.24692737430167597,\n \"acc_norm_stderr\": 0.014422292204808836\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.28431372549019607,\n \"acc_stderr\": 0.025829163272757482,\n \"acc_norm\": 0.28431372549019607,\n \"acc_norm_stderr\": 0.025829163272757482\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.3408360128617363,\n \"acc_stderr\": 0.02692084126077616,\n \"acc_norm\": 0.3408360128617363,\n \"acc_norm_stderr\": 0.02692084126077616\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.30246913580246915,\n \"acc_stderr\": 0.025557653981868055,\n \"acc_norm\": 0.30246913580246915,\n \"acc_norm_stderr\": 0.025557653981868055\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.24822695035460993,\n \"acc_stderr\": 0.025770015644290396,\n \"acc_norm\": 0.24822695035460993,\n \"acc_norm_stderr\": 0.025770015644290396\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.288135593220339,\n \"acc_stderr\": 0.011567140661324563,\n 
\"acc_norm\": 0.288135593220339,\n \"acc_norm_stderr\": 0.011567140661324563\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.4485294117647059,\n \"acc_stderr\": 0.030211479609121593,\n \"acc_norm\": 0.4485294117647059,\n \"acc_norm_stderr\": 0.030211479609121593\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.25,\n \"acc_stderr\": 0.01751781884501444,\n \"acc_norm\": 0.25,\n \"acc_norm_stderr\": 0.01751781884501444\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.35454545454545455,\n \"acc_stderr\": 0.04582004841505416,\n \"acc_norm\": 0.35454545454545455,\n \"acc_norm_stderr\": 0.04582004841505416\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.40408163265306124,\n \"acc_stderr\": 0.031414708025865885,\n \"acc_norm\": 0.40408163265306124,\n \"acc_norm_stderr\": 0.031414708025865885\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.24875621890547264,\n \"acc_stderr\": 0.030567675938916714,\n \"acc_norm\": 0.24875621890547264,\n \"acc_norm_stderr\": 0.030567675938916714\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.24,\n \"acc_stderr\": 0.04292346959909281,\n \"acc_norm\": 0.24,\n \"acc_norm_stderr\": 0.04292346959909281\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.30120481927710846,\n \"acc_stderr\": 0.0357160923005348,\n \"acc_norm\": 0.30120481927710846,\n \"acc_norm_stderr\": 0.0357160923005348\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.3157894736842105,\n \"acc_stderr\": 0.03565079670708312,\n \"acc_norm\": 0.3157894736842105,\n \"acc_norm_stderr\": 0.03565079670708312\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.2252141982864137,\n \"mc1_stderr\": 0.014623240768023505,\n \"mc2\": 0.35997350772493825,\n \"mc2_stderr\": 0.014042588070598837\n }\n}\n```", "repo_url": "https://huggingface.co/Taekyoon/llama2-ko-7b-test", "leaderboard_url": 
"https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_08_28T09_09_02.494936", "path": ["**/details_harness|arc:challenge|25_2023-08-28T09:09:02.494936.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-08-28T09:09:02.494936.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_08_28T09_09_02.494936", "path": ["**/details_harness|hellaswag|10_2023-08-28T09:09:02.494936.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-08-28T09:09:02.494936.parquet"]}]}, {"config_name": "harness_hendrycksTest", "data_files": [{"split": "2023_08_28T09_09_02.494936", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-28T09:09:02.494936.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-28T09:09:02.494936.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-28T09:09:02.494936.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-28T09:09:02.494936.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-28T09:09:02.494936.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-28T09:09:02.494936.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-28T09:09:02.494936.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-28T09:09:02.494936.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-28T09:09:02.494936.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-28T09:09:02.494936.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-28T09:09:02.494936.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-28T09:09:02.494936.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-28T09:09:02.494936.parquet", 
"**/details_harness|hendrycksTest-econometrics|5_2023-08-28T09:09:02.494936.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-28T09:09:02.494936.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-28T09:09:02.494936.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-28T09:09:02.494936.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-28T09:09:02.494936.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-08-28T09:09:02.494936.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-28T09:09:02.494936.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-28T09:09:02.494936.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-28T09:09:02.494936.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-28T09:09:02.494936.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-28T09:09:02.494936.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-28T09:09:02.494936.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-28T09:09:02.494936.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-28T09:09:02.494936.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-28T09:09:02.494936.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-28T09:09:02.494936.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-28T09:09:02.494936.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-28T09:09:02.494936.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-28T09:09:02.494936.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-28T09:09:02.494936.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-28T09:09:02.494936.parquet", 
"**/details_harness|hendrycksTest-international_law|5_2023-08-28T09:09:02.494936.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-28T09:09:02.494936.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-28T09:09:02.494936.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-08-28T09:09:02.494936.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-28T09:09:02.494936.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-28T09:09:02.494936.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-28T09:09:02.494936.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-28T09:09:02.494936.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-28T09:09:02.494936.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-28T09:09:02.494936.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-28T09:09:02.494936.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-28T09:09:02.494936.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-28T09:09:02.494936.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-28T09:09:02.494936.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-28T09:09:02.494936.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-28T09:09:02.494936.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-28T09:09:02.494936.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-28T09:09:02.494936.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-28T09:09:02.494936.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-28T09:09:02.494936.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-28T09:09:02.494936.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-28T09:09:02.494936.parquet", 
"**/details_harness|hendrycksTest-world_religions|5_2023-08-28T09:09:02.494936.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-28T09:09:02.494936.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-28T09:09:02.494936.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-28T09:09:02.494936.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-28T09:09:02.494936.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-28T09:09:02.494936.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-28T09:09:02.494936.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-28T09:09:02.494936.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-28T09:09:02.494936.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-28T09:09:02.494936.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-28T09:09:02.494936.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-28T09:09:02.494936.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-28T09:09:02.494936.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-28T09:09:02.494936.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-28T09:09:02.494936.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-28T09:09:02.494936.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-28T09:09:02.494936.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-28T09:09:02.494936.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-28T09:09:02.494936.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-08-28T09:09:02.494936.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-28T09:09:02.494936.parquet", 
"**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-28T09:09:02.494936.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-28T09:09:02.494936.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-28T09:09:02.494936.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-28T09:09:02.494936.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-28T09:09:02.494936.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-28T09:09:02.494936.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-28T09:09:02.494936.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-28T09:09:02.494936.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-28T09:09:02.494936.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-28T09:09:02.494936.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-28T09:09:02.494936.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-28T09:09:02.494936.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-28T09:09:02.494936.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-28T09:09:02.494936.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-28T09:09:02.494936.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-28T09:09:02.494936.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-28T09:09:02.494936.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-08-28T09:09:02.494936.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-28T09:09:02.494936.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-28T09:09:02.494936.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-28T09:09:02.494936.parquet", 
"**/details_harness|hendrycksTest-miscellaneous|5_2023-08-28T09:09:02.494936.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-28T09:09:02.494936.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-28T09:09:02.494936.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-28T09:09:02.494936.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-28T09:09:02.494936.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-28T09:09:02.494936.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-28T09:09:02.494936.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-28T09:09:02.494936.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-28T09:09:02.494936.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-28T09:09:02.494936.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-28T09:09:02.494936.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-28T09:09:02.494936.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-28T09:09:02.494936.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-28T09:09:02.494936.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-28T09:09:02.494936.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-08-28T09:09:02.494936.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_08_28T09_09_02.494936", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-28T09:09:02.494936.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-28T09:09:02.494936.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_08_28T09_09_02.494936", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-28T09:09:02.494936.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-anatomy|5_2023-08-28T09:09:02.494936.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_08_28T09_09_02.494936", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-28T09:09:02.494936.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-28T09:09:02.494936.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_08_28T09_09_02.494936", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-08-28T09:09:02.494936.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-08-28T09:09:02.494936.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_08_28T09_09_02.494936", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-28T09:09:02.494936.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-28T09:09:02.494936.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_08_28T09_09_02.494936", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-28T09:09:02.494936.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-28T09:09:02.494936.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_08_28T09_09_02.494936", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-28T09:09:02.494936.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-28T09:09:02.494936.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_08_28T09_09_02.494936", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-28T09:09:02.494936.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-28T09:09:02.494936.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_08_28T09_09_02.494936", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-28T09:09:02.494936.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-28T09:09:02.494936.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_08_28T09_09_02.494936", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-08-28T09:09:02.494936.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-08-28T09:09:02.494936.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_08_28T09_09_02.494936", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-08-28T09:09:02.494936.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-08-28T09:09:02.494936.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_08_28T09_09_02.494936", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-28T09:09:02.494936.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-28T09:09:02.494936.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_08_28T09_09_02.494936", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-28T09:09:02.494936.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-28T09:09:02.494936.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_08_28T09_09_02.494936", "path": 
["**/details_harness|hendrycksTest-econometrics|5_2023-08-28T09:09:02.494936.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-28T09:09:02.494936.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_08_28T09_09_02.494936", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-28T09:09:02.494936.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-28T09:09:02.494936.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_08_28T09_09_02.494936", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-28T09:09:02.494936.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-28T09:09:02.494936.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_08_28T09_09_02.494936", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-28T09:09:02.494936.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-28T09:09:02.494936.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_08_28T09_09_02.494936", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-28T09:09:02.494936.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-28T09:09:02.494936.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_08_28T09_09_02.494936", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-28T09:09:02.494936.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-28T09:09:02.494936.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", 
"data_files": [{"split": "2023_08_28T09_09_02.494936", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-28T09:09:02.494936.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-28T09:09:02.494936.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_08_28T09_09_02.494936", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-28T09:09:02.494936.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-28T09:09:02.494936.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_08_28T09_09_02.494936", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-28T09:09:02.494936.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-28T09:09:02.494936.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_08_28T09_09_02.494936", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-28T09:09:02.494936.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-28T09:09:02.494936.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_08_28T09_09_02.494936", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-28T09:09:02.494936.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-28T09:09:02.494936.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_08_28T09_09_02.494936", "path": 
["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-28T09:09:02.494936.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-28T09:09:02.494936.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_08_28T09_09_02.494936", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-28T09:09:02.494936.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-28T09:09:02.494936.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_08_28T09_09_02.494936", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-28T09:09:02.494936.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-28T09:09:02.494936.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_08_28T09_09_02.494936", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-28T09:09:02.494936.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-28T09:09:02.494936.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_08_28T09_09_02.494936", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-28T09:09:02.494936.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-28T09:09:02.494936.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_08_28T09_09_02.494936", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-28T09:09:02.494936.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-28T09:09:02.494936.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_08_28T09_09_02.494936", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-28T09:09:02.494936.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-28T09:09:02.494936.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_08_28T09_09_02.494936", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-28T09:09:02.494936.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-28T09:09:02.494936.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_08_28T09_09_02.494936", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-28T09:09:02.494936.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-28T09:09:02.494936.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_08_28T09_09_02.494936", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-28T09:09:02.494936.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-28T09:09:02.494936.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_08_28T09_09_02.494936", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-28T09:09:02.494936.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-28T09:09:02.494936.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_08_28T09_09_02.494936", "path": 
["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-28T09:09:02.494936.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-28T09:09:02.494936.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_08_28T09_09_02.494936", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-28T09:09:02.494936.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-28T09:09:02.494936.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_08_28T09_09_02.494936", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-28T09:09:02.494936.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-28T09:09:02.494936.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_08_28T09_09_02.494936", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-28T09:09:02.494936.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-28T09:09:02.494936.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_08_28T09_09_02.494936", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-28T09:09:02.494936.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-28T09:09:02.494936.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_08_28T09_09_02.494936", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-28T09:09:02.494936.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-28T09:09:02.494936.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_08_28T09_09_02.494936", "path": 
["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-28T09:09:02.494936.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-28T09:09:02.494936.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_08_28T09_09_02.494936", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-28T09:09:02.494936.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-28T09:09:02.494936.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_08_28T09_09_02.494936", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-28T09:09:02.494936.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-28T09:09:02.494936.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_08_28T09_09_02.494936", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-28T09:09:02.494936.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-28T09:09:02.494936.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_08_28T09_09_02.494936", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-28T09:09:02.494936.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-28T09:09:02.494936.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_08_28T09_09_02.494936", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-28T09:09:02.494936.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-28T09:09:02.494936.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_08_28T09_09_02.494936", "path": 
["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-28T09:09:02.494936.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-28T09:09:02.494936.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_08_28T09_09_02.494936", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-28T09:09:02.494936.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-28T09:09:02.494936.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_08_28T09_09_02.494936", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-28T09:09:02.494936.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-28T09:09:02.494936.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_08_28T09_09_02.494936", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-28T09:09:02.494936.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-28T09:09:02.494936.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_08_28T09_09_02.494936", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-28T09:09:02.494936.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-28T09:09:02.494936.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_08_28T09_09_02.494936", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-28T09:09:02.494936.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-28T09:09:02.494936.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_08_28T09_09_02.494936", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-28T09:09:02.494936.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-28T09:09:02.494936.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_08_28T09_09_02.494936", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-28T09:09:02.494936.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-28T09:09:02.494936.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_08_28T09_09_02.494936", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-28T09:09:02.494936.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-28T09:09:02.494936.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_08_28T09_09_02.494936", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-28T09:09:02.494936.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-28T09:09:02.494936.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_08_28T09_09_02.494936", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-28T09:09:02.494936.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-28T09:09:02.494936.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_08_28T09_09_02.494936", "path": ["results_2023-08-28T09:09:02.494936.parquet"]}, {"split": "latest", "path": ["results_2023-08-28T09:09:02.494936.parquet"]}]}]}
2023-08-28T08:10:22+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of Taekyoon/llama2-ko-7b-test ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model Taekyoon/llama2-ko-7b-test on the Open LLM Leaderboard. The dataset is composed of 61 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-08-28T09:09:02.494936: ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
[ "# Dataset Card for Evaluation run of Taekyoon/llama2-ko-7b-test", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model Taekyoon/llama2-ko-7b-test on the Open LLM Leaderboard.\n\nThe dataset is composed of 61 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-08-28T09:09:02.494936:", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of Taekyoon/llama2-ko-7b-test", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model Taekyoon/llama2-ko-7b-test on the Open LLM Leaderboard.\n\nThe dataset is composed of 61 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-08-28T09:09:02.494936:", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 22, 31, 170, 22, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of Taekyoon/llama2-ko-7b-test## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model Taekyoon/llama2-ko-7b-test on the Open LLM Leaderboard.\n\nThe dataset is composed of 61 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-08-28T09:09:02.494936:### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
c278053eae0563303d01aea4753b025b07830dba
# Dataset Card for Evaluation run of TinyPixel/lima-test ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/TinyPixel/lima-test - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [TinyPixel/lima-test](https://huggingface.co/TinyPixel/lima-test) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 64 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_TinyPixel__lima-test", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-10-17T19:33:22.756804](https://huggingface.co/datasets/open-llm-leaderboard/details_TinyPixel__lima-test/blob/main/results_2023-10-17T19-33-22.756804.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. 
You find each in the results and the "latest" split for each eval): ```python { "all": { "em": 0.001363255033557047, "em_stderr": 0.0003778609196461008, "f1": 0.05645763422818797, "f1_stderr": 0.0013211879752480866, "acc": 0.40996862682678575, "acc_stderr": 0.009889312522114477 }, "harness|drop|3": { "em": 0.001363255033557047, "em_stderr": 0.0003778609196461008, "f1": 0.05645763422818797, "f1_stderr": 0.0013211879752480866 }, "harness|gsm8k|5": { "acc": 0.07960576194086429, "acc_stderr": 0.007455924338676286 }, "harness|winogrande|5": { "acc": 0.7403314917127072, "acc_stderr": 0.012322700705552667 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
open-llm-leaderboard/details_TinyPixel__lima-test
[ "region:us" ]
2023-08-28T08:11:03+00:00
{"pretty_name": "Evaluation run of TinyPixel/lima-test", "dataset_summary": "Dataset automatically created during the evaluation run of model [TinyPixel/lima-test](https://huggingface.co/TinyPixel/lima-test) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_TinyPixel__lima-test\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-10-17T19:33:22.756804](https://huggingface.co/datasets/open-llm-leaderboard/details_TinyPixel__lima-test/blob/main/results_2023-10-17T19-33-22.756804.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. 
You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.001363255033557047,\n \"em_stderr\": 0.0003778609196461008,\n \"f1\": 0.05645763422818797,\n \"f1_stderr\": 0.0013211879752480866,\n \"acc\": 0.40996862682678575,\n \"acc_stderr\": 0.009889312522114477\n },\n \"harness|drop|3\": {\n \"em\": 0.001363255033557047,\n \"em_stderr\": 0.0003778609196461008,\n \"f1\": 0.05645763422818797,\n \"f1_stderr\": 0.0013211879752480866\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.07960576194086429,\n \"acc_stderr\": 0.007455924338676286\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7403314917127072,\n \"acc_stderr\": 0.012322700705552667\n }\n}\n```", "repo_url": "https://huggingface.co/TinyPixel/lima-test", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_08_28T09_10_45.645303", "path": ["**/details_harness|arc:challenge|25_2023-08-28T09:10:45.645303.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-08-28T09:10:45.645303.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_10_17T19_33_22.756804", "path": ["**/details_harness|drop|3_2023-10-17T19-33-22.756804.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-10-17T19-33-22.756804.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_10_17T19_33_22.756804", "path": ["**/details_harness|gsm8k|5_2023-10-17T19-33-22.756804.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-10-17T19-33-22.756804.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_08_28T09_10_45.645303", "path": ["**/details_harness|hellaswag|10_2023-08-28T09:10:45.645303.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-08-28T09:10:45.645303.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_08_28T09_10_45.645303", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-28T09:10:45.645303.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-28T09:10:45.645303.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-28T09:10:45.645303.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-28T09:10:45.645303.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-28T09:10:45.645303.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-28T09:10:45.645303.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-28T09:10:45.645303.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-28T09:10:45.645303.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-28T09:10:45.645303.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-28T09:10:45.645303.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-28T09:10:45.645303.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-28T09:10:45.645303.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-28T09:10:45.645303.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-28T09:10:45.645303.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-28T09:10:45.645303.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-28T09:10:45.645303.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-28T09:10:45.645303.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-28T09:10:45.645303.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-08-28T09:10:45.645303.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-28T09:10:45.645303.parquet", 
"**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-28T09:10:45.645303.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-28T09:10:45.645303.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-28T09:10:45.645303.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-28T09:10:45.645303.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-28T09:10:45.645303.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-28T09:10:45.645303.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-28T09:10:45.645303.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-28T09:10:45.645303.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-28T09:10:45.645303.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-28T09:10:45.645303.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-28T09:10:45.645303.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-28T09:10:45.645303.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-28T09:10:45.645303.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-28T09:10:45.645303.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-28T09:10:45.645303.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-28T09:10:45.645303.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-28T09:10:45.645303.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-08-28T09:10:45.645303.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-28T09:10:45.645303.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-28T09:10:45.645303.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-28T09:10:45.645303.parquet", 
"**/details_harness|hendrycksTest-miscellaneous|5_2023-08-28T09:10:45.645303.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-28T09:10:45.645303.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-28T09:10:45.645303.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-28T09:10:45.645303.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-28T09:10:45.645303.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-28T09:10:45.645303.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-28T09:10:45.645303.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-28T09:10:45.645303.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-28T09:10:45.645303.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-28T09:10:45.645303.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-28T09:10:45.645303.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-28T09:10:45.645303.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-28T09:10:45.645303.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-28T09:10:45.645303.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-28T09:10:45.645303.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-08-28T09:10:45.645303.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-28T09:10:45.645303.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-28T09:10:45.645303.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-28T09:10:45.645303.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-28T09:10:45.645303.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-28T09:10:45.645303.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-28T09:10:45.645303.parquet", 
"**/details_harness|hendrycksTest-college_chemistry|5_2023-08-28T09:10:45.645303.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-28T09:10:45.645303.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-28T09:10:45.645303.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-28T09:10:45.645303.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-28T09:10:45.645303.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-28T09:10:45.645303.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-28T09:10:45.645303.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-28T09:10:45.645303.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-28T09:10:45.645303.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-28T09:10:45.645303.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-28T09:10:45.645303.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-28T09:10:45.645303.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-08-28T09:10:45.645303.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-28T09:10:45.645303.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-28T09:10:45.645303.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-28T09:10:45.645303.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-28T09:10:45.645303.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-28T09:10:45.645303.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-28T09:10:45.645303.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-28T09:10:45.645303.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-28T09:10:45.645303.parquet", 
"**/details_harness|hendrycksTest-high_school_physics|5_2023-08-28T09:10:45.645303.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-28T09:10:45.645303.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-28T09:10:45.645303.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-28T09:10:45.645303.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-28T09:10:45.645303.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-28T09:10:45.645303.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-28T09:10:45.645303.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-28T09:10:45.645303.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-28T09:10:45.645303.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-28T09:10:45.645303.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-08-28T09:10:45.645303.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-28T09:10:45.645303.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-28T09:10:45.645303.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-28T09:10:45.645303.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-28T09:10:45.645303.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-28T09:10:45.645303.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-28T09:10:45.645303.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-28T09:10:45.645303.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-28T09:10:45.645303.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-28T09:10:45.645303.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-28T09:10:45.645303.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-28T09:10:45.645303.parquet", 
"**/details_harness|hendrycksTest-professional_medicine|5_2023-08-28T09:10:45.645303.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-28T09:10:45.645303.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-28T09:10:45.645303.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-28T09:10:45.645303.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-28T09:10:45.645303.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-28T09:10:45.645303.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-28T09:10:45.645303.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-08-28T09:10:45.645303.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_08_28T09_10_45.645303", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-28T09:10:45.645303.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-28T09:10:45.645303.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_08_28T09_10_45.645303", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-28T09:10:45.645303.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-28T09:10:45.645303.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_08_28T09_10_45.645303", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-28T09:10:45.645303.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-28T09:10:45.645303.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_08_28T09_10_45.645303", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-08-28T09:10:45.645303.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-business_ethics|5_2023-08-28T09:10:45.645303.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_08_28T09_10_45.645303", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-28T09:10:45.645303.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-28T09:10:45.645303.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_08_28T09_10_45.645303", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-28T09:10:45.645303.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-28T09:10:45.645303.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_08_28T09_10_45.645303", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-28T09:10:45.645303.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-28T09:10:45.645303.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_08_28T09_10_45.645303", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-28T09:10:45.645303.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-28T09:10:45.645303.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_08_28T09_10_45.645303", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-28T09:10:45.645303.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-28T09:10:45.645303.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_08_28T09_10_45.645303", "path": 
["**/details_harness|hendrycksTest-college_medicine|5_2023-08-28T09:10:45.645303.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-08-28T09:10:45.645303.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_08_28T09_10_45.645303", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-08-28T09:10:45.645303.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-08-28T09:10:45.645303.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_08_28T09_10_45.645303", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-28T09:10:45.645303.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-28T09:10:45.645303.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_08_28T09_10_45.645303", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-28T09:10:45.645303.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-28T09:10:45.645303.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_08_28T09_10_45.645303", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-28T09:10:45.645303.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-28T09:10:45.645303.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_08_28T09_10_45.645303", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-28T09:10:45.645303.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-28T09:10:45.645303.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", 
"data_files": [{"split": "2023_08_28T09_10_45.645303", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-28T09:10:45.645303.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-28T09:10:45.645303.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_08_28T09_10_45.645303", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-28T09:10:45.645303.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-28T09:10:45.645303.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_08_28T09_10_45.645303", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-28T09:10:45.645303.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-28T09:10:45.645303.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_08_28T09_10_45.645303", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-28T09:10:45.645303.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-28T09:10:45.645303.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_08_28T09_10_45.645303", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-28T09:10:45.645303.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-28T09:10:45.645303.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_08_28T09_10_45.645303", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-28T09:10:45.645303.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-28T09:10:45.645303.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_08_28T09_10_45.645303", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-28T09:10:45.645303.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-28T09:10:45.645303.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_08_28T09_10_45.645303", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-28T09:10:45.645303.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-28T09:10:45.645303.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_08_28T09_10_45.645303", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-28T09:10:45.645303.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-28T09:10:45.645303.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_08_28T09_10_45.645303", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-28T09:10:45.645303.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-28T09:10:45.645303.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_08_28T09_10_45.645303", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-28T09:10:45.645303.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-28T09:10:45.645303.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_08_28T09_10_45.645303", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-28T09:10:45.645303.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-28T09:10:45.645303.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_08_28T09_10_45.645303", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-28T09:10:45.645303.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-28T09:10:45.645303.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_08_28T09_10_45.645303", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-28T09:10:45.645303.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-28T09:10:45.645303.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_08_28T09_10_45.645303", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-28T09:10:45.645303.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-28T09:10:45.645303.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_08_28T09_10_45.645303", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-28T09:10:45.645303.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-28T09:10:45.645303.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_08_28T09_10_45.645303", "path": 
["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-28T09:10:45.645303.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-28T09:10:45.645303.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_08_28T09_10_45.645303", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-28T09:10:45.645303.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-28T09:10:45.645303.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_08_28T09_10_45.645303", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-28T09:10:45.645303.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-28T09:10:45.645303.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_08_28T09_10_45.645303", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-28T09:10:45.645303.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-28T09:10:45.645303.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_08_28T09_10_45.645303", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-28T09:10:45.645303.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-28T09:10:45.645303.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_08_28T09_10_45.645303", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-28T09:10:45.645303.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-28T09:10:45.645303.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": 
"2023_08_28T09_10_45.645303", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-28T09:10:45.645303.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-28T09:10:45.645303.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_08_28T09_10_45.645303", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-28T09:10:45.645303.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-28T09:10:45.645303.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_08_28T09_10_45.645303", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-28T09:10:45.645303.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-28T09:10:45.645303.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_08_28T09_10_45.645303", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-28T09:10:45.645303.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-28T09:10:45.645303.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_08_28T09_10_45.645303", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-28T09:10:45.645303.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-28T09:10:45.645303.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_08_28T09_10_45.645303", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-28T09:10:45.645303.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-28T09:10:45.645303.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": 
"2023_08_28T09_10_45.645303", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-28T09:10:45.645303.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-28T09:10:45.645303.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_08_28T09_10_45.645303", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-28T09:10:45.645303.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-28T09:10:45.645303.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_08_28T09_10_45.645303", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-28T09:10:45.645303.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-28T09:10:45.645303.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_08_28T09_10_45.645303", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-28T09:10:45.645303.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-28T09:10:45.645303.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_08_28T09_10_45.645303", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-28T09:10:45.645303.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-28T09:10:45.645303.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_08_28T09_10_45.645303", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-28T09:10:45.645303.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-28T09:10:45.645303.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": 
[{"split": "2023_08_28T09_10_45.645303", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-28T09:10:45.645303.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-28T09:10:45.645303.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_08_28T09_10_45.645303", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-28T09:10:45.645303.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-28T09:10:45.645303.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_08_28T09_10_45.645303", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-28T09:10:45.645303.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-28T09:10:45.645303.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_08_28T09_10_45.645303", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-28T09:10:45.645303.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-28T09:10:45.645303.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_08_28T09_10_45.645303", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-28T09:10:45.645303.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-28T09:10:45.645303.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_08_28T09_10_45.645303", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-28T09:10:45.645303.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-28T09:10:45.645303.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_08_28T09_10_45.645303", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-28T09:10:45.645303.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-28T09:10:45.645303.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_08_28T09_10_45.645303", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-28T09:10:45.645303.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-28T09:10:45.645303.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_08_28T09_10_45.645303", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-28T09:10:45.645303.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-28T09:10:45.645303.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_10_17T19_33_22.756804", "path": ["**/details_harness|winogrande|5_2023-10-17T19-33-22.756804.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-10-17T19-33-22.756804.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_08_28T09_10_45.645303", "path": ["results_2023-08-28T09:10:45.645303.parquet"]}, {"split": "2023_10_17T19_33_22.756804", "path": ["results_2023-10-17T19-33-22.756804.parquet"]}, {"split": "latest", "path": ["results_2023-10-17T19-33-22.756804.parquet"]}]}]}
2023-10-17T18:33:36+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of TinyPixel/lima-test ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model TinyPixel/lima-test on the Open LLM Leaderboard. The dataset is composed of 64 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-10-17T19:33:22.756804(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
[ "# Dataset Card for Evaluation run of TinyPixel/lima-test", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model TinyPixel/lima-test on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-17T19:33:22.756804(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of TinyPixel/lima-test", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model TinyPixel/lima-test on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-17T19:33:22.756804(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 17, 31, 165, 67, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of TinyPixel/lima-test## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model TinyPixel/lima-test on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-10-17T19:33:22.756804(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
c15d0321350e61f8c578f0cadc7f8695ea54e385
# Dataset Card for Dataset Name ## Dataset Description - **Homepage:** - **Repository:** - **Paper:** - **Leaderboard:** - **Point of Contact:** ### Dataset Summary MLIR file associated passes, crawled from the official LLVM/MLIR repository. ### Supported Tasks and Leaderboards Code LLM for MLIR optimization ### Languages MLIR
WangX0111/MLIR-Passes
[ "license:mit", "region:us" ]
2023-08-28T08:15:24+00:00
{"license": "mit"}
2023-08-28T08:20:23+00:00
[]
[]
TAGS #license-mit #region-us
# Dataset Card for Dataset Name ## Dataset Description - Homepage: - Repository: - Paper: - Leaderboard: - Point of Contact: ### Dataset Summary MLIR file associated passes, crawled from the official LLVM/MLIR repository. ### Supported Tasks and Leaderboards Code LLM for MLIR optimization ### Languages MLIR
[ "# Dataset Card for Dataset Name", "## Dataset Description\n\n- Homepage: \n- Repository: \n- Paper: \n- Leaderboard: \n- Point of Contact:", "### Dataset Summary\n\nMLIR file associated passes, crawled from the official LLVM/MLIR repository.", "### Supported Tasks and Leaderboards\n\nCode LLM for MLIR optimization", "### Languages\n\nMLIR" ]
[ "TAGS\n#license-mit #region-us \n", "# Dataset Card for Dataset Name", "## Dataset Description\n\n- Homepage: \n- Repository: \n- Paper: \n- Leaderboard: \n- Point of Contact:", "### Dataset Summary\n\nMLIR file associated passes, crawled from the official LLVM/MLIR repository.", "### Supported Tasks and Leaderboards\n\nCode LLM for MLIR optimization", "### Languages\n\nMLIR" ]
[ 11, 8, 24, 28, 19, 7 ]
[ "passage: TAGS\n#license-mit #region-us \n# Dataset Card for Dataset Name## Dataset Description\n\n- Homepage: \n- Repository: \n- Paper: \n- Leaderboard: \n- Point of Contact:### Dataset Summary\n\nMLIR file associated passes, crawled from the official LLVM/MLIR repository.### Supported Tasks and Leaderboards\n\nCode LLM for MLIR optimization### Languages\n\nMLIR" ]
7b666ec1f42b9f6b1ff2b34a8f2f18dd190591e1
# Dataset Card for "DST_Multiwoz21_instruction_tuning" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
AtheerAlgherairy/DST_Multiwoz21_instruction_Tuning
[ "region:us" ]
2023-08-28T08:24:40+00:00
{"configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "validation", "path": "data/validation-*"}, {"split": "test", "path": "data/test-*"}]}], "dataset_info": {"features": [{"name": "context", "dtype": "string"}, {"name": "Answer", "dtype": "string"}, {"name": "json_answer", "dtype": "string"}, {"name": "chat_prompt_no_ans", "dtype": "string"}, {"name": "chat_prompt", "dtype": "string"}, {"name": "llama_prompt_no_ans", "dtype": "string"}, {"name": "llama_prompt", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 1083744915, "num_examples": 56778}, {"name": "validation", "num_bytes": 142248451, "num_examples": 7374}, {"name": "test", "num_bytes": 142462443, "num_examples": 7372}], "download_size": 140370223, "dataset_size": 1368455809}}
2023-10-12T07:58:08+00:00
[]
[]
TAGS #region-us
# Dataset Card for "DST_Multiwoz21_instruction_tuning" More Information needed
[ "# Dataset Card for \"DST_Multiwoz21_instruction_tuning\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"DST_Multiwoz21_instruction_tuning\"\n\nMore Information needed" ]
[ 6, 23 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"DST_Multiwoz21_instruction_tuning\"\n\nMore Information needed" ]
1de024e58a86e97e6b107ac23c8f27b376afba31
# Dataset Card for Evaluation run of yeontaek/llama-2-13B-ensemble-v4 ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/yeontaek/llama-2-13B-ensemble-v4 - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [yeontaek/llama-2-13B-ensemble-v4](https://huggingface.co/yeontaek/llama-2-13B-ensemble-v4) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 61 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). 
To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_yeontaek__llama-2-13B-ensemble-v4", "harness_truthfulqa_mc_0", split="train") ``` ## Latest results These are the [latest results from run 2023-08-28T09:27:03.867556](https://huggingface.co/datasets/open-llm-leaderboard/details_yeontaek__llama-2-13B-ensemble-v4/blob/main/results_2023-08-28T09%3A27%3A03.867556.json): ```python { "all": { "acc": 0.5663992305904989, "acc_stderr": 0.03429173024379658, "acc_norm": 0.5702504327612581, "acc_norm_stderr": 0.03427095428817404, "mc1": 0.3806609547123623, "mc1_stderr": 0.016997627871907926, "mc2": 0.518155888420307, "mc2_stderr": 0.015704569450921007 }, "harness|arc:challenge|25": { "acc": 0.6015358361774744, "acc_stderr": 0.014306946052735565, "acc_norm": 0.6296928327645052, "acc_norm_stderr": 0.01411129875167495 }, "harness|hellaswag|10": { "acc": 0.6247759410476, "acc_stderr": 0.004831911860478687, "acc_norm": 0.8238398725353515, "acc_norm_stderr": 0.0038017777798095838 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.32, "acc_stderr": 0.046882617226215034, "acc_norm": 0.32, "acc_norm_stderr": 0.046882617226215034 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.4666666666666667, "acc_stderr": 0.043097329010363554, "acc_norm": 0.4666666666666667, "acc_norm_stderr": 0.043097329010363554 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.5526315789473685, "acc_stderr": 0.0404633688397825, "acc_norm": 0.5526315789473685, "acc_norm_stderr": 0.0404633688397825 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.59, "acc_stderr": 0.049431107042371025, "acc_norm": 0.59, "acc_norm_stderr": 0.049431107042371025 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.6113207547169811, "acc_stderr": 0.030000485448675986, "acc_norm": 0.6113207547169811, "acc_norm_stderr": 0.030000485448675986 }, "harness|hendrycksTest-college_biology|5": { "acc": 
0.5694444444444444, "acc_stderr": 0.04140685639111503, "acc_norm": 0.5694444444444444, "acc_norm_stderr": 0.04140685639111503 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.4, "acc_stderr": 0.049236596391733084, "acc_norm": 0.4, "acc_norm_stderr": 0.049236596391733084 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.47, "acc_stderr": 0.050161355804659205, "acc_norm": 0.47, "acc_norm_stderr": 0.050161355804659205 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.35, "acc_stderr": 0.0479372485441102, "acc_norm": 0.35, "acc_norm_stderr": 0.0479372485441102 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.5260115606936416, "acc_stderr": 0.038073017265045125, "acc_norm": 0.5260115606936416, "acc_norm_stderr": 0.038073017265045125 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.29411764705882354, "acc_stderr": 0.04533838195929775, "acc_norm": 0.29411764705882354, "acc_norm_stderr": 0.04533838195929775 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.74, "acc_stderr": 0.04408440022768079, "acc_norm": 0.74, "acc_norm_stderr": 0.04408440022768079 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.4851063829787234, "acc_stderr": 0.032671518489247764, "acc_norm": 0.4851063829787234, "acc_norm_stderr": 0.032671518489247764 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.2894736842105263, "acc_stderr": 0.04266339443159394, "acc_norm": 0.2894736842105263, "acc_norm_stderr": 0.04266339443159394 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5241379310344828, "acc_stderr": 0.0416180850350153, "acc_norm": 0.5241379310344828, "acc_norm_stderr": 0.0416180850350153 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.30423280423280424, "acc_stderr": 0.023695415009463087, "acc_norm": 0.30423280423280424, "acc_norm_stderr": 0.023695415009463087 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.3492063492063492, "acc_stderr": 0.04263906892795133, "acc_norm": 
0.3492063492063492, "acc_norm_stderr": 0.04263906892795133 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.38, "acc_stderr": 0.048783173121456316, "acc_norm": 0.38, "acc_norm_stderr": 0.048783173121456316 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.632258064516129, "acc_stderr": 0.027430866579973467, "acc_norm": 0.632258064516129, "acc_norm_stderr": 0.027430866579973467 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.4433497536945813, "acc_stderr": 0.03495334582162934, "acc_norm": 0.4433497536945813, "acc_norm_stderr": 0.03495334582162934 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.59, "acc_stderr": 0.04943110704237102, "acc_norm": 0.59, "acc_norm_stderr": 0.04943110704237102 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.6909090909090909, "acc_stderr": 0.036085410115739666, "acc_norm": 0.6909090909090909, "acc_norm_stderr": 0.036085410115739666 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.7373737373737373, "acc_stderr": 0.03135305009533086, "acc_norm": 0.7373737373737373, "acc_norm_stderr": 0.03135305009533086 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.8082901554404145, "acc_stderr": 0.028408953626245265, "acc_norm": 0.8082901554404145, "acc_norm_stderr": 0.028408953626245265 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.5641025641025641, "acc_stderr": 0.025141801511177498, "acc_norm": 0.5641025641025641, "acc_norm_stderr": 0.025141801511177498 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.2851851851851852, "acc_stderr": 0.027528599210340496, "acc_norm": 0.2851851851851852, "acc_norm_stderr": 0.027528599210340496 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.6050420168067226, "acc_stderr": 0.03175367846096626, "acc_norm": 0.6050420168067226, "acc_norm_stderr": 0.03175367846096626 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.304635761589404, 
"acc_stderr": 0.03757949922943343, "acc_norm": 0.304635761589404, "acc_norm_stderr": 0.03757949922943343 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.7559633027522936, "acc_stderr": 0.018415286351416402, "acc_norm": 0.7559633027522936, "acc_norm_stderr": 0.018415286351416402 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.49074074074074076, "acc_stderr": 0.03409386946992699, "acc_norm": 0.49074074074074076, "acc_norm_stderr": 0.03409386946992699 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.75, "acc_stderr": 0.03039153369274154, "acc_norm": 0.75, "acc_norm_stderr": 0.03039153369274154 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.7341772151898734, "acc_stderr": 0.02875679962965834, "acc_norm": 0.7341772151898734, "acc_norm_stderr": 0.02875679962965834 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.6681614349775785, "acc_stderr": 0.031602951437766785, "acc_norm": 0.6681614349775785, "acc_norm_stderr": 0.031602951437766785 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.648854961832061, "acc_stderr": 0.04186445163013751, "acc_norm": 0.648854961832061, "acc_norm_stderr": 0.04186445163013751 }, "harness|hendrycksTest-international_law|5": { "acc": 0.743801652892562, "acc_stderr": 0.03984979653302872, "acc_norm": 0.743801652892562, "acc_norm_stderr": 0.03984979653302872 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.7222222222222222, "acc_stderr": 0.043300437496507416, "acc_norm": 0.7222222222222222, "acc_norm_stderr": 0.043300437496507416 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.6871165644171779, "acc_stderr": 0.036429145782924055, "acc_norm": 0.6871165644171779, "acc_norm_stderr": 0.036429145782924055 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.3482142857142857, "acc_stderr": 0.04521829902833585, "acc_norm": 0.3482142857142857, "acc_norm_stderr": 0.04521829902833585 }, "harness|hendrycksTest-management|5": { "acc": 0.7669902912621359, 
"acc_stderr": 0.04185832598928315, "acc_norm": 0.7669902912621359, "acc_norm_stderr": 0.04185832598928315 }, "harness|hendrycksTest-marketing|5": { "acc": 0.7991452991452992, "acc_stderr": 0.026246772946890484, "acc_norm": 0.7991452991452992, "acc_norm_stderr": 0.026246772946890484 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.63, "acc_stderr": 0.048523658709391, "acc_norm": 0.63, "acc_norm_stderr": 0.048523658709391 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.7650063856960408, "acc_stderr": 0.015162024152278452, "acc_norm": 0.7650063856960408, "acc_norm_stderr": 0.015162024152278452 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.6242774566473989, "acc_stderr": 0.02607431485165708, "acc_norm": 0.6242774566473989, "acc_norm_stderr": 0.02607431485165708 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.3664804469273743, "acc_stderr": 0.016115235504865467, "acc_norm": 0.3664804469273743, "acc_norm_stderr": 0.016115235504865467 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.6045751633986928, "acc_stderr": 0.02799672318063145, "acc_norm": 0.6045751633986928, "acc_norm_stderr": 0.02799672318063145 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.6527331189710611, "acc_stderr": 0.027040745502307336, "acc_norm": 0.6527331189710611, "acc_norm_stderr": 0.027040745502307336 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.6419753086419753, "acc_stderr": 0.026675611926037093, "acc_norm": 0.6419753086419753, "acc_norm_stderr": 0.026675611926037093 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.41843971631205673, "acc_stderr": 0.02942799403941999, "acc_norm": 0.41843971631205673, "acc_norm_stderr": 0.02942799403941999 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.42633637548891784, "acc_stderr": 0.012630884771599692, "acc_norm": 0.42633637548891784, "acc_norm_stderr": 0.012630884771599692 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.5330882352941176, "acc_stderr": 
0.03030625772246831, "acc_norm": 0.5330882352941176, "acc_norm_stderr": 0.03030625772246831 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.576797385620915, "acc_stderr": 0.01998780976948206, "acc_norm": 0.576797385620915, "acc_norm_stderr": 0.01998780976948206 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6090909090909091, "acc_stderr": 0.046737523336702384, "acc_norm": 0.6090909090909091, "acc_norm_stderr": 0.046737523336702384 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.6489795918367347, "acc_stderr": 0.03055531675557364, "acc_norm": 0.6489795918367347, "acc_norm_stderr": 0.03055531675557364 }, "harness|hendrycksTest-sociology|5": { "acc": 0.6268656716417911, "acc_stderr": 0.034198326081760065, "acc_norm": 0.6268656716417911, "acc_norm_stderr": 0.034198326081760065 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.85, "acc_stderr": 0.0358870281282637, "acc_norm": 0.85, "acc_norm_stderr": 0.0358870281282637 }, "harness|hendrycksTest-virology|5": { "acc": 0.4457831325301205, "acc_stderr": 0.03869543323472101, "acc_norm": 0.4457831325301205, "acc_norm_stderr": 0.03869543323472101 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.7719298245614035, "acc_stderr": 0.032180937956023566, "acc_norm": 0.7719298245614035, "acc_norm_stderr": 0.032180937956023566 }, "harness|truthfulqa:mc|0": { "mc1": 0.3806609547123623, "mc1_stderr": 0.016997627871907926, "mc2": 0.518155888420307, "mc2_stderr": 0.015704569450921007 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? 
[More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
open-llm-leaderboard/details_yeontaek__llama-2-13B-ensemble-v4
[ "region:us" ]
2023-08-28T08:27:26+00:00
{"pretty_name": "Evaluation run of yeontaek/llama-2-13B-ensemble-v4", "dataset_summary": "Dataset automatically created during the evaluation run of model [yeontaek/llama-2-13B-ensemble-v4](https://huggingface.co/yeontaek/llama-2-13B-ensemble-v4) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 61 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_yeontaek__llama-2-13B-ensemble-v4\",\n\t\"harness_truthfulqa_mc_0\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-08-28T09:27:03.867556](https://huggingface.co/datasets/open-llm-leaderboard/details_yeontaek__llama-2-13B-ensemble-v4/blob/main/results_2023-08-28T09%3A27%3A03.867556.json):\n\n```python\n{\n \"all\": {\n \"acc\": 0.5663992305904989,\n \"acc_stderr\": 0.03429173024379658,\n \"acc_norm\": 0.5702504327612581,\n \"acc_norm_stderr\": 0.03427095428817404,\n \"mc1\": 0.3806609547123623,\n \"mc1_stderr\": 0.016997627871907926,\n \"mc2\": 0.518155888420307,\n \"mc2_stderr\": 0.015704569450921007\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.6015358361774744,\n \"acc_stderr\": 0.014306946052735565,\n \"acc_norm\": 0.6296928327645052,\n \"acc_norm_stderr\": 0.01411129875167495\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6247759410476,\n 
\"acc_stderr\": 0.004831911860478687,\n \"acc_norm\": 0.8238398725353515,\n \"acc_norm_stderr\": 0.0038017777798095838\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.32,\n \"acc_stderr\": 0.046882617226215034,\n \"acc_norm\": 0.32,\n \"acc_norm_stderr\": 0.046882617226215034\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.4666666666666667,\n \"acc_stderr\": 0.043097329010363554,\n \"acc_norm\": 0.4666666666666667,\n \"acc_norm_stderr\": 0.043097329010363554\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.5526315789473685,\n \"acc_stderr\": 0.0404633688397825,\n \"acc_norm\": 0.5526315789473685,\n \"acc_norm_stderr\": 0.0404633688397825\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.59,\n \"acc_stderr\": 0.049431107042371025,\n \"acc_norm\": 0.59,\n \"acc_norm_stderr\": 0.049431107042371025\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.6113207547169811,\n \"acc_stderr\": 0.030000485448675986,\n \"acc_norm\": 0.6113207547169811,\n \"acc_norm_stderr\": 0.030000485448675986\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.5694444444444444,\n \"acc_stderr\": 0.04140685639111503,\n \"acc_norm\": 0.5694444444444444,\n \"acc_norm_stderr\": 0.04140685639111503\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.4,\n \"acc_stderr\": 0.049236596391733084,\n \"acc_norm\": 0.4,\n \"acc_norm_stderr\": 0.049236596391733084\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.47,\n \"acc_stderr\": 0.050161355804659205,\n \"acc_norm\": 0.47,\n \"acc_norm_stderr\": 0.050161355804659205\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.35,\n \"acc_stderr\": 0.0479372485441102,\n \"acc_norm\": 0.35,\n \"acc_norm_stderr\": 0.0479372485441102\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.5260115606936416,\n \"acc_stderr\": 0.038073017265045125,\n \"acc_norm\": 0.5260115606936416,\n 
\"acc_norm_stderr\": 0.038073017265045125\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.29411764705882354,\n \"acc_stderr\": 0.04533838195929775,\n \"acc_norm\": 0.29411764705882354,\n \"acc_norm_stderr\": 0.04533838195929775\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.74,\n \"acc_stderr\": 0.04408440022768079,\n \"acc_norm\": 0.74,\n \"acc_norm_stderr\": 0.04408440022768079\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.4851063829787234,\n \"acc_stderr\": 0.032671518489247764,\n \"acc_norm\": 0.4851063829787234,\n \"acc_norm_stderr\": 0.032671518489247764\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.2894736842105263,\n \"acc_stderr\": 0.04266339443159394,\n \"acc_norm\": 0.2894736842105263,\n \"acc_norm_stderr\": 0.04266339443159394\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5241379310344828,\n \"acc_stderr\": 0.0416180850350153,\n \"acc_norm\": 0.5241379310344828,\n \"acc_norm_stderr\": 0.0416180850350153\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.30423280423280424,\n \"acc_stderr\": 0.023695415009463087,\n \"acc_norm\": 0.30423280423280424,\n \"acc_norm_stderr\": 0.023695415009463087\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.3492063492063492,\n \"acc_stderr\": 0.04263906892795133,\n \"acc_norm\": 0.3492063492063492,\n \"acc_norm_stderr\": 0.04263906892795133\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.38,\n \"acc_stderr\": 0.048783173121456316,\n \"acc_norm\": 0.38,\n \"acc_norm_stderr\": 0.048783173121456316\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.632258064516129,\n \"acc_stderr\": 0.027430866579973467,\n \"acc_norm\": 0.632258064516129,\n \"acc_norm_stderr\": 0.027430866579973467\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.4433497536945813,\n \"acc_stderr\": 0.03495334582162934,\n \"acc_norm\": 
0.4433497536945813,\n \"acc_norm_stderr\": 0.03495334582162934\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.59,\n \"acc_stderr\": 0.04943110704237102,\n \"acc_norm\": 0.59,\n \"acc_norm_stderr\": 0.04943110704237102\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.6909090909090909,\n \"acc_stderr\": 0.036085410115739666,\n \"acc_norm\": 0.6909090909090909,\n \"acc_norm_stderr\": 0.036085410115739666\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.7373737373737373,\n \"acc_stderr\": 0.03135305009533086,\n \"acc_norm\": 0.7373737373737373,\n \"acc_norm_stderr\": 0.03135305009533086\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.8082901554404145,\n \"acc_stderr\": 0.028408953626245265,\n \"acc_norm\": 0.8082901554404145,\n \"acc_norm_stderr\": 0.028408953626245265\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.5641025641025641,\n \"acc_stderr\": 0.025141801511177498,\n \"acc_norm\": 0.5641025641025641,\n \"acc_norm_stderr\": 0.025141801511177498\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.2851851851851852,\n \"acc_stderr\": 0.027528599210340496,\n \"acc_norm\": 0.2851851851851852,\n \"acc_norm_stderr\": 0.027528599210340496\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.6050420168067226,\n \"acc_stderr\": 0.03175367846096626,\n \"acc_norm\": 0.6050420168067226,\n \"acc_norm_stderr\": 0.03175367846096626\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.304635761589404,\n \"acc_stderr\": 0.03757949922943343,\n \"acc_norm\": 0.304635761589404,\n \"acc_norm_stderr\": 0.03757949922943343\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.7559633027522936,\n \"acc_stderr\": 0.018415286351416402,\n \"acc_norm\": 0.7559633027522936,\n \"acc_norm_stderr\": 0.018415286351416402\n },\n 
\"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.49074074074074076,\n \"acc_stderr\": 0.03409386946992699,\n \"acc_norm\": 0.49074074074074076,\n \"acc_norm_stderr\": 0.03409386946992699\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.75,\n \"acc_stderr\": 0.03039153369274154,\n \"acc_norm\": 0.75,\n \"acc_norm_stderr\": 0.03039153369274154\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.7341772151898734,\n \"acc_stderr\": 0.02875679962965834,\n \"acc_norm\": 0.7341772151898734,\n \"acc_norm_stderr\": 0.02875679962965834\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6681614349775785,\n \"acc_stderr\": 0.031602951437766785,\n \"acc_norm\": 0.6681614349775785,\n \"acc_norm_stderr\": 0.031602951437766785\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.648854961832061,\n \"acc_stderr\": 0.04186445163013751,\n \"acc_norm\": 0.648854961832061,\n \"acc_norm_stderr\": 0.04186445163013751\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.743801652892562,\n \"acc_stderr\": 0.03984979653302872,\n \"acc_norm\": 0.743801652892562,\n \"acc_norm_stderr\": 0.03984979653302872\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7222222222222222,\n \"acc_stderr\": 0.043300437496507416,\n \"acc_norm\": 0.7222222222222222,\n \"acc_norm_stderr\": 0.043300437496507416\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.6871165644171779,\n \"acc_stderr\": 0.036429145782924055,\n \"acc_norm\": 0.6871165644171779,\n \"acc_norm_stderr\": 0.036429145782924055\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.3482142857142857,\n \"acc_stderr\": 0.04521829902833585,\n \"acc_norm\": 0.3482142857142857,\n \"acc_norm_stderr\": 0.04521829902833585\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7669902912621359,\n \"acc_stderr\": 0.04185832598928315,\n \"acc_norm\": 0.7669902912621359,\n \"acc_norm_stderr\": 
0.04185832598928315\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.7991452991452992,\n \"acc_stderr\": 0.026246772946890484,\n \"acc_norm\": 0.7991452991452992,\n \"acc_norm_stderr\": 0.026246772946890484\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.63,\n \"acc_stderr\": 0.048523658709391,\n \"acc_norm\": 0.63,\n \"acc_norm_stderr\": 0.048523658709391\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.7650063856960408,\n \"acc_stderr\": 0.015162024152278452,\n \"acc_norm\": 0.7650063856960408,\n \"acc_norm_stderr\": 0.015162024152278452\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.6242774566473989,\n \"acc_stderr\": 0.02607431485165708,\n \"acc_norm\": 0.6242774566473989,\n \"acc_norm_stderr\": 0.02607431485165708\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.3664804469273743,\n \"acc_stderr\": 0.016115235504865467,\n \"acc_norm\": 0.3664804469273743,\n \"acc_norm_stderr\": 0.016115235504865467\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.6045751633986928,\n \"acc_stderr\": 0.02799672318063145,\n \"acc_norm\": 0.6045751633986928,\n \"acc_norm_stderr\": 0.02799672318063145\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.6527331189710611,\n \"acc_stderr\": 0.027040745502307336,\n \"acc_norm\": 0.6527331189710611,\n \"acc_norm_stderr\": 0.027040745502307336\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.6419753086419753,\n \"acc_stderr\": 0.026675611926037093,\n \"acc_norm\": 0.6419753086419753,\n \"acc_norm_stderr\": 0.026675611926037093\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.41843971631205673,\n \"acc_stderr\": 0.02942799403941999,\n \"acc_norm\": 0.41843971631205673,\n \"acc_norm_stderr\": 0.02942799403941999\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.42633637548891784,\n \"acc_stderr\": 0.012630884771599692,\n \"acc_norm\": 0.42633637548891784,\n \"acc_norm_stderr\": 
0.012630884771599692\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.5330882352941176,\n \"acc_stderr\": 0.03030625772246831,\n \"acc_norm\": 0.5330882352941176,\n \"acc_norm_stderr\": 0.03030625772246831\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.576797385620915,\n \"acc_stderr\": 0.01998780976948206,\n \"acc_norm\": 0.576797385620915,\n \"acc_norm_stderr\": 0.01998780976948206\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6090909090909091,\n \"acc_stderr\": 0.046737523336702384,\n \"acc_norm\": 0.6090909090909091,\n \"acc_norm_stderr\": 0.046737523336702384\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.6489795918367347,\n \"acc_stderr\": 0.03055531675557364,\n \"acc_norm\": 0.6489795918367347,\n \"acc_norm_stderr\": 0.03055531675557364\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.6268656716417911,\n \"acc_stderr\": 0.034198326081760065,\n \"acc_norm\": 0.6268656716417911,\n \"acc_norm_stderr\": 0.034198326081760065\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.85,\n \"acc_stderr\": 0.0358870281282637,\n \"acc_norm\": 0.85,\n \"acc_norm_stderr\": 0.0358870281282637\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.4457831325301205,\n \"acc_stderr\": 0.03869543323472101,\n \"acc_norm\": 0.4457831325301205,\n \"acc_norm_stderr\": 0.03869543323472101\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.7719298245614035,\n \"acc_stderr\": 0.032180937956023566,\n \"acc_norm\": 0.7719298245614035,\n \"acc_norm_stderr\": 0.032180937956023566\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.3806609547123623,\n \"mc1_stderr\": 0.016997627871907926,\n \"mc2\": 0.518155888420307,\n \"mc2_stderr\": 0.015704569450921007\n }\n}\n```", "repo_url": "https://huggingface.co/yeontaek/llama-2-13B-ensemble-v4", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": 
"[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_08_28T09_27_03.867556", "path": ["**/details_harness|arc:challenge|25_2023-08-28T09:27:03.867556.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-08-28T09:27:03.867556.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_08_28T09_27_03.867556", "path": ["**/details_harness|hellaswag|10_2023-08-28T09:27:03.867556.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-08-28T09:27:03.867556.parquet"]}]}, {"config_name": "harness_hendrycksTest", "data_files": [{"split": "2023_08_28T09_27_03.867556", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-28T09:27:03.867556.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-28T09:27:03.867556.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-28T09:27:03.867556.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-28T09:27:03.867556.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-28T09:27:03.867556.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-28T09:27:03.867556.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-28T09:27:03.867556.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-28T09:27:03.867556.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-28T09:27:03.867556.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-28T09:27:03.867556.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-28T09:27:03.867556.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-28T09:27:03.867556.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-28T09:27:03.867556.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-28T09:27:03.867556.parquet", 
"**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-28T09:27:03.867556.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-28T09:27:03.867556.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-28T09:27:03.867556.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-28T09:27:03.867556.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-08-28T09:27:03.867556.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-28T09:27:03.867556.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-28T09:27:03.867556.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-28T09:27:03.867556.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-28T09:27:03.867556.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-28T09:27:03.867556.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-28T09:27:03.867556.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-28T09:27:03.867556.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-28T09:27:03.867556.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-28T09:27:03.867556.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-28T09:27:03.867556.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-28T09:27:03.867556.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-28T09:27:03.867556.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-28T09:27:03.867556.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-28T09:27:03.867556.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-28T09:27:03.867556.parquet", 
"**/details_harness|hendrycksTest-international_law|5_2023-08-28T09:27:03.867556.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-28T09:27:03.867556.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-28T09:27:03.867556.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-08-28T09:27:03.867556.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-28T09:27:03.867556.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-28T09:27:03.867556.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-28T09:27:03.867556.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-28T09:27:03.867556.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-28T09:27:03.867556.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-28T09:27:03.867556.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-28T09:27:03.867556.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-28T09:27:03.867556.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-28T09:27:03.867556.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-28T09:27:03.867556.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-28T09:27:03.867556.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-28T09:27:03.867556.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-28T09:27:03.867556.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-28T09:27:03.867556.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-28T09:27:03.867556.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-28T09:27:03.867556.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-28T09:27:03.867556.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-28T09:27:03.867556.parquet", 
"**/details_harness|hendrycksTest-world_religions|5_2023-08-28T09:27:03.867556.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-28T09:27:03.867556.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-28T09:27:03.867556.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-28T09:27:03.867556.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-28T09:27:03.867556.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-28T09:27:03.867556.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-28T09:27:03.867556.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-28T09:27:03.867556.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-28T09:27:03.867556.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-28T09:27:03.867556.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-28T09:27:03.867556.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-28T09:27:03.867556.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-28T09:27:03.867556.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-28T09:27:03.867556.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-28T09:27:03.867556.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-28T09:27:03.867556.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-28T09:27:03.867556.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-28T09:27:03.867556.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-28T09:27:03.867556.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-08-28T09:27:03.867556.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-28T09:27:03.867556.parquet", 
"**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-28T09:27:03.867556.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-28T09:27:03.867556.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-28T09:27:03.867556.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-28T09:27:03.867556.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-28T09:27:03.867556.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-28T09:27:03.867556.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-28T09:27:03.867556.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-28T09:27:03.867556.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-28T09:27:03.867556.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-28T09:27:03.867556.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-28T09:27:03.867556.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-28T09:27:03.867556.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-28T09:27:03.867556.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-28T09:27:03.867556.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-28T09:27:03.867556.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-28T09:27:03.867556.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-28T09:27:03.867556.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-08-28T09:27:03.867556.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-28T09:27:03.867556.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-28T09:27:03.867556.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-28T09:27:03.867556.parquet", 
"**/details_harness|hendrycksTest-miscellaneous|5_2023-08-28T09:27:03.867556.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-28T09:27:03.867556.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-28T09:27:03.867556.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-28T09:27:03.867556.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-28T09:27:03.867556.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-28T09:27:03.867556.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-28T09:27:03.867556.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-28T09:27:03.867556.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-28T09:27:03.867556.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-28T09:27:03.867556.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-28T09:27:03.867556.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-28T09:27:03.867556.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-28T09:27:03.867556.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-28T09:27:03.867556.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-28T09:27:03.867556.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-08-28T09:27:03.867556.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_08_28T09_27_03.867556", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-28T09:27:03.867556.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-28T09:27:03.867556.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_08_28T09_27_03.867556", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-28T09:27:03.867556.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-anatomy|5_2023-08-28T09:27:03.867556.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_08_28T09_27_03.867556", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-28T09:27:03.867556.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-28T09:27:03.867556.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_08_28T09_27_03.867556", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-08-28T09:27:03.867556.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-08-28T09:27:03.867556.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_08_28T09_27_03.867556", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-28T09:27:03.867556.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-28T09:27:03.867556.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_08_28T09_27_03.867556", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-28T09:27:03.867556.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-28T09:27:03.867556.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_08_28T09_27_03.867556", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-28T09:27:03.867556.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-28T09:27:03.867556.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_08_28T09_27_03.867556", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-28T09:27:03.867556.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-28T09:27:03.867556.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_08_28T09_27_03.867556", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-28T09:27:03.867556.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-28T09:27:03.867556.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_08_28T09_27_03.867556", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-08-28T09:27:03.867556.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-08-28T09:27:03.867556.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_08_28T09_27_03.867556", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-08-28T09:27:03.867556.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-08-28T09:27:03.867556.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_08_28T09_27_03.867556", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-28T09:27:03.867556.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-28T09:27:03.867556.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_08_28T09_27_03.867556", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-28T09:27:03.867556.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-28T09:27:03.867556.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_08_28T09_27_03.867556", "path": 
["**/details_harness|hendrycksTest-econometrics|5_2023-08-28T09:27:03.867556.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-28T09:27:03.867556.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_08_28T09_27_03.867556", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-28T09:27:03.867556.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-28T09:27:03.867556.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_08_28T09_27_03.867556", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-28T09:27:03.867556.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-28T09:27:03.867556.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_08_28T09_27_03.867556", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-28T09:27:03.867556.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-28T09:27:03.867556.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_08_28T09_27_03.867556", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-28T09:27:03.867556.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-28T09:27:03.867556.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_08_28T09_27_03.867556", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-28T09:27:03.867556.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-28T09:27:03.867556.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", 
"data_files": [{"split": "2023_08_28T09_27_03.867556", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-28T09:27:03.867556.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-28T09:27:03.867556.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_08_28T09_27_03.867556", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-28T09:27:03.867556.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-28T09:27:03.867556.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_08_28T09_27_03.867556", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-28T09:27:03.867556.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-28T09:27:03.867556.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_08_28T09_27_03.867556", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-28T09:27:03.867556.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-28T09:27:03.867556.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_08_28T09_27_03.867556", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-28T09:27:03.867556.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-28T09:27:03.867556.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_08_28T09_27_03.867556", "path": 
["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-28T09:27:03.867556.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-28T09:27:03.867556.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_08_28T09_27_03.867556", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-28T09:27:03.867556.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-28T09:27:03.867556.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_08_28T09_27_03.867556", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-28T09:27:03.867556.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-28T09:27:03.867556.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_08_28T09_27_03.867556", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-28T09:27:03.867556.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-28T09:27:03.867556.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_08_28T09_27_03.867556", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-28T09:27:03.867556.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-28T09:27:03.867556.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_08_28T09_27_03.867556", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-28T09:27:03.867556.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-28T09:27:03.867556.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_08_28T09_27_03.867556", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-28T09:27:03.867556.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-28T09:27:03.867556.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_08_28T09_27_03.867556", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-28T09:27:03.867556.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-28T09:27:03.867556.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_08_28T09_27_03.867556", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-28T09:27:03.867556.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-28T09:27:03.867556.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_08_28T09_27_03.867556", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-28T09:27:03.867556.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-28T09:27:03.867556.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_08_28T09_27_03.867556", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-28T09:27:03.867556.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-28T09:27:03.867556.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_08_28T09_27_03.867556", "path": 
["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-28T09:27:03.867556.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-28T09:27:03.867556.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_08_28T09_27_03.867556", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-28T09:27:03.867556.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-28T09:27:03.867556.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_08_28T09_27_03.867556", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-28T09:27:03.867556.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-28T09:27:03.867556.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_08_28T09_27_03.867556", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-28T09:27:03.867556.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-28T09:27:03.867556.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_08_28T09_27_03.867556", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-28T09:27:03.867556.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-28T09:27:03.867556.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_08_28T09_27_03.867556", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-28T09:27:03.867556.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-28T09:27:03.867556.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_08_28T09_27_03.867556", "path": 
["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-28T09:27:03.867556.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-28T09:27:03.867556.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_08_28T09_27_03.867556", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-28T09:27:03.867556.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-28T09:27:03.867556.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_08_28T09_27_03.867556", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-28T09:27:03.867556.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-28T09:27:03.867556.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_08_28T09_27_03.867556", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-28T09:27:03.867556.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-28T09:27:03.867556.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_08_28T09_27_03.867556", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-28T09:27:03.867556.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-28T09:27:03.867556.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_08_28T09_27_03.867556", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-28T09:27:03.867556.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-28T09:27:03.867556.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_08_28T09_27_03.867556", "path": 
["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-28T09:27:03.867556.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-28T09:27:03.867556.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_08_28T09_27_03.867556", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-28T09:27:03.867556.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-28T09:27:03.867556.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_08_28T09_27_03.867556", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-28T09:27:03.867556.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-28T09:27:03.867556.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_08_28T09_27_03.867556", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-28T09:27:03.867556.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-28T09:27:03.867556.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_08_28T09_27_03.867556", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-28T09:27:03.867556.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-28T09:27:03.867556.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_08_28T09_27_03.867556", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-28T09:27:03.867556.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-28T09:27:03.867556.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_08_28T09_27_03.867556", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-28T09:27:03.867556.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-28T09:27:03.867556.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_08_28T09_27_03.867556", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-28T09:27:03.867556.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-28T09:27:03.867556.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_08_28T09_27_03.867556", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-28T09:27:03.867556.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-28T09:27:03.867556.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_08_28T09_27_03.867556", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-28T09:27:03.867556.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-28T09:27:03.867556.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_08_28T09_27_03.867556", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-28T09:27:03.867556.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-28T09:27:03.867556.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_08_28T09_27_03.867556", "path": ["results_2023-08-28T09:27:03.867556.parquet"]}, {"split": "latest", "path": ["results_2023-08-28T09:27:03.867556.parquet"]}]}]}
2023-08-28T08:28:28+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of yeontaek/llama-2-13B-ensemble-v4 ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model yeontaek/llama-2-13B-ensemble-v4 on the Open LLM Leaderboard. The dataset is composed of 61 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-08-28T09:27:03.867556: ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
[ "# Dataset Card for Evaluation run of yeontaek/llama-2-13B-ensemble-v4", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model yeontaek/llama-2-13B-ensemble-v4 on the Open LLM Leaderboard.\n\nThe dataset is composed of 61 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-08-28T09:27:03.867556:", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of yeontaek/llama-2-13B-ensemble-v4", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model yeontaek/llama-2-13B-ensemble-v4 on the Open LLM Leaderboard.\n\nThe dataset is composed of 61 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-08-28T09:27:03.867556:", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 23, 31, 171, 21, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of yeontaek/llama-2-13B-ensemble-v4## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model yeontaek/llama-2-13B-ensemble-v4 on the Open LLM Leaderboard.\n\nThe dataset is composed of 61 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-08-28T09:27:03.867556:### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
eba7c4711673087e88370900e7cadc18fa64f29b
# Dataset Card for "llama2_classifying_and_explainning_v4" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
RikoteMaster/llama2_classifying_and_explainning_v4
[ "region:us" ]
2023-08-28T08:30:37+00:00
{"dataset_info": {"features": [{"name": "Explanation", "dtype": "string"}, {"name": "Text_processed", "dtype": "string"}, {"name": "Emotion", "dtype": "string"}, {"name": "text", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 14114420, "num_examples": 13859}], "download_size": 3770656, "dataset_size": 14114420}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]}
2023-08-28T08:30:40+00:00
[]
[]
TAGS #region-us
# Dataset Card for "llama2_classifying_and_explainning_v4" More Information needed
[ "# Dataset Card for \"llama2_classifying_and_explainning_v4\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"llama2_classifying_and_explainning_v4\"\n\nMore Information needed" ]
[ 6, 25 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"llama2_classifying_and_explainning_v4\"\n\nMore Information needed" ]
48dac7f52cf0809ab281fa9c0aebbe72c76fb2c1
# Dataset Card for Evaluation run of shareAI/CodeLLaMA-chat-13b-Chinese ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/shareAI/CodeLLaMA-chat-13b-Chinese - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [shareAI/CodeLLaMA-chat-13b-Chinese](https://huggingface.co/shareAI/CodeLLaMA-chat-13b-Chinese) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 64 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_shareAI__CodeLLaMA-chat-13b-Chinese", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-09-17T09:47:36.542667](https://huggingface.co/datasets/open-llm-leaderboard/details_shareAI__CodeLLaMA-chat-13b-Chinese/blob/main/results_2023-09-17T09-47-36.542667.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. 
You find each in the results and the "latest" split for each eval): ```python { "all": { "em": 0.001363255033557047, "em_stderr": 0.0003778609196460702, "f1": 0.054088716442953266, "f1_stderr": 0.00128126893252217, "acc": 0.3932130306078425, "acc_stderr": 0.010830541663790839 }, "harness|drop|3": { "em": 0.001363255033557047, "em_stderr": 0.0003778609196460702, "f1": 0.054088716442953266, "f1_stderr": 0.00128126893252217 }, "harness|gsm8k|5": { "acc": 0.10765731614859743, "acc_stderr": 0.008537484003023356 }, "harness|winogrande|5": { "acc": 0.6787687450670876, "acc_stderr": 0.013123599324558323 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
open-llm-leaderboard/details_shareAI__CodeLLaMA-chat-13b-Chinese
[ "region:us" ]
2023-08-28T08:34:06+00:00
{"pretty_name": "Evaluation run of shareAI/CodeLLaMA-chat-13b-Chinese", "dataset_summary": "Dataset automatically created during the evaluation run of model [shareAI/CodeLLaMA-chat-13b-Chinese](https://huggingface.co/shareAI/CodeLLaMA-chat-13b-Chinese) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_shareAI__CodeLLaMA-chat-13b-Chinese\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-09-17T09:47:36.542667](https://huggingface.co/datasets/open-llm-leaderboard/details_shareAI__CodeLLaMA-chat-13b-Chinese/blob/main/results_2023-09-17T09-47-36.542667.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. 
You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.001363255033557047,\n \"em_stderr\": 0.0003778609196460702,\n \"f1\": 0.054088716442953266,\n \"f1_stderr\": 0.00128126893252217,\n \"acc\": 0.3932130306078425,\n \"acc_stderr\": 0.010830541663790839\n },\n \"harness|drop|3\": {\n \"em\": 0.001363255033557047,\n \"em_stderr\": 0.0003778609196460702,\n \"f1\": 0.054088716442953266,\n \"f1_stderr\": 0.00128126893252217\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.10765731614859743,\n \"acc_stderr\": 0.008537484003023356\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.6787687450670876,\n \"acc_stderr\": 0.013123599324558323\n }\n}\n```", "repo_url": "https://huggingface.co/shareAI/CodeLLaMA-chat-13b-Chinese", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_08_28T09_33_43.580652", "path": ["**/details_harness|arc:challenge|25_2023-08-28T09:33:43.580652.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-08-28T09:33:43.580652.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_09_17T09_47_36.542667", "path": ["**/details_harness|drop|3_2023-09-17T09-47-36.542667.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-09-17T09-47-36.542667.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_09_17T09_47_36.542667", "path": ["**/details_harness|gsm8k|5_2023-09-17T09-47-36.542667.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-09-17T09-47-36.542667.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_08_28T09_33_43.580652", "path": ["**/details_harness|hellaswag|10_2023-08-28T09:33:43.580652.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hellaswag|10_2023-08-28T09:33:43.580652.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_08_28T09_33_43.580652", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-28T09:33:43.580652.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-28T09:33:43.580652.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-28T09:33:43.580652.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-28T09:33:43.580652.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-28T09:33:43.580652.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-28T09:33:43.580652.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-28T09:33:43.580652.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-28T09:33:43.580652.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-28T09:33:43.580652.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-28T09:33:43.580652.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-28T09:33:43.580652.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-28T09:33:43.580652.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-28T09:33:43.580652.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-28T09:33:43.580652.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-28T09:33:43.580652.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-28T09:33:43.580652.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-28T09:33:43.580652.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-28T09:33:43.580652.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-08-28T09:33:43.580652.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-28T09:33:43.580652.parquet", 
"**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-28T09:33:43.580652.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-28T09:33:43.580652.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-28T09:33:43.580652.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-28T09:33:43.580652.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-28T09:33:43.580652.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-28T09:33:43.580652.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-28T09:33:43.580652.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-28T09:33:43.580652.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-28T09:33:43.580652.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-28T09:33:43.580652.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-28T09:33:43.580652.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-28T09:33:43.580652.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-28T09:33:43.580652.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-28T09:33:43.580652.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-28T09:33:43.580652.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-28T09:33:43.580652.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-28T09:33:43.580652.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-08-28T09:33:43.580652.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-28T09:33:43.580652.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-28T09:33:43.580652.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-28T09:33:43.580652.parquet", 
"**/details_harness|hendrycksTest-miscellaneous|5_2023-08-28T09:33:43.580652.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-28T09:33:43.580652.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-28T09:33:43.580652.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-28T09:33:43.580652.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-28T09:33:43.580652.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-28T09:33:43.580652.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-28T09:33:43.580652.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-28T09:33:43.580652.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-28T09:33:43.580652.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-28T09:33:43.580652.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-28T09:33:43.580652.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-28T09:33:43.580652.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-28T09:33:43.580652.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-28T09:33:43.580652.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-28T09:33:43.580652.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-08-28T09:33:43.580652.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-28T09:33:43.580652.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-28T09:33:43.580652.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-28T09:33:43.580652.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-28T09:33:43.580652.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-28T09:33:43.580652.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-28T09:33:43.580652.parquet", 
"**/details_harness|hendrycksTest-college_chemistry|5_2023-08-28T09:33:43.580652.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-28T09:33:43.580652.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-28T09:33:43.580652.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-28T09:33:43.580652.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-28T09:33:43.580652.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-28T09:33:43.580652.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-28T09:33:43.580652.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-28T09:33:43.580652.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-28T09:33:43.580652.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-28T09:33:43.580652.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-28T09:33:43.580652.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-28T09:33:43.580652.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-08-28T09:33:43.580652.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-28T09:33:43.580652.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-28T09:33:43.580652.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-28T09:33:43.580652.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-28T09:33:43.580652.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-28T09:33:43.580652.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-28T09:33:43.580652.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-28T09:33:43.580652.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-28T09:33:43.580652.parquet", 
"**/details_harness|hendrycksTest-high_school_physics|5_2023-08-28T09:33:43.580652.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-28T09:33:43.580652.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-28T09:33:43.580652.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-28T09:33:43.580652.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-28T09:33:43.580652.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-28T09:33:43.580652.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-28T09:33:43.580652.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-28T09:33:43.580652.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-28T09:33:43.580652.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-28T09:33:43.580652.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-08-28T09:33:43.580652.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-28T09:33:43.580652.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-28T09:33:43.580652.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-28T09:33:43.580652.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-28T09:33:43.580652.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-28T09:33:43.580652.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-28T09:33:43.580652.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-28T09:33:43.580652.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-28T09:33:43.580652.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-28T09:33:43.580652.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-28T09:33:43.580652.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-28T09:33:43.580652.parquet", 
"**/details_harness|hendrycksTest-professional_medicine|5_2023-08-28T09:33:43.580652.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-28T09:33:43.580652.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-28T09:33:43.580652.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-28T09:33:43.580652.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-28T09:33:43.580652.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-28T09:33:43.580652.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-28T09:33:43.580652.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-08-28T09:33:43.580652.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_08_28T09_33_43.580652", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-28T09:33:43.580652.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-28T09:33:43.580652.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_08_28T09_33_43.580652", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-28T09:33:43.580652.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-28T09:33:43.580652.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_08_28T09_33_43.580652", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-28T09:33:43.580652.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-28T09:33:43.580652.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_08_28T09_33_43.580652", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-08-28T09:33:43.580652.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-business_ethics|5_2023-08-28T09:33:43.580652.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_08_28T09_33_43.580652", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-28T09:33:43.580652.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-28T09:33:43.580652.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_08_28T09_33_43.580652", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-28T09:33:43.580652.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-28T09:33:43.580652.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_08_28T09_33_43.580652", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-28T09:33:43.580652.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-28T09:33:43.580652.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_08_28T09_33_43.580652", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-28T09:33:43.580652.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-28T09:33:43.580652.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_08_28T09_33_43.580652", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-28T09:33:43.580652.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-28T09:33:43.580652.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_08_28T09_33_43.580652", "path": 
["**/details_harness|hendrycksTest-college_medicine|5_2023-08-28T09:33:43.580652.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-08-28T09:33:43.580652.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_08_28T09_33_43.580652", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-08-28T09:33:43.580652.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-08-28T09:33:43.580652.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_08_28T09_33_43.580652", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-28T09:33:43.580652.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-28T09:33:43.580652.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_08_28T09_33_43.580652", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-28T09:33:43.580652.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-28T09:33:43.580652.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_08_28T09_33_43.580652", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-28T09:33:43.580652.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-28T09:33:43.580652.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_08_28T09_33_43.580652", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-28T09:33:43.580652.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-28T09:33:43.580652.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", 
"data_files": [{"split": "2023_08_28T09_33_43.580652", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-28T09:33:43.580652.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-28T09:33:43.580652.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_08_28T09_33_43.580652", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-28T09:33:43.580652.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-28T09:33:43.580652.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_08_28T09_33_43.580652", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-28T09:33:43.580652.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-28T09:33:43.580652.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_08_28T09_33_43.580652", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-28T09:33:43.580652.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-28T09:33:43.580652.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_08_28T09_33_43.580652", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-28T09:33:43.580652.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-28T09:33:43.580652.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_08_28T09_33_43.580652", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-28T09:33:43.580652.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-28T09:33:43.580652.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_08_28T09_33_43.580652", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-28T09:33:43.580652.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-28T09:33:43.580652.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_08_28T09_33_43.580652", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-28T09:33:43.580652.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-28T09:33:43.580652.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_08_28T09_33_43.580652", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-28T09:33:43.580652.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-28T09:33:43.580652.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_08_28T09_33_43.580652", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-28T09:33:43.580652.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-28T09:33:43.580652.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_08_28T09_33_43.580652", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-28T09:33:43.580652.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-28T09:33:43.580652.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_08_28T09_33_43.580652", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-28T09:33:43.580652.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-28T09:33:43.580652.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_08_28T09_33_43.580652", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-28T09:33:43.580652.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-28T09:33:43.580652.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_08_28T09_33_43.580652", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-28T09:33:43.580652.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-28T09:33:43.580652.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_08_28T09_33_43.580652", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-28T09:33:43.580652.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-28T09:33:43.580652.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_08_28T09_33_43.580652", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-28T09:33:43.580652.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-28T09:33:43.580652.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_08_28T09_33_43.580652", "path": 
["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-28T09:33:43.580652.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-28T09:33:43.580652.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_08_28T09_33_43.580652", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-28T09:33:43.580652.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-28T09:33:43.580652.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_08_28T09_33_43.580652", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-28T09:33:43.580652.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-28T09:33:43.580652.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_08_28T09_33_43.580652", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-28T09:33:43.580652.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-28T09:33:43.580652.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_08_28T09_33_43.580652", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-28T09:33:43.580652.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-28T09:33:43.580652.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_08_28T09_33_43.580652", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-28T09:33:43.580652.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-28T09:33:43.580652.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": 
"2023_08_28T09_33_43.580652", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-28T09:33:43.580652.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-28T09:33:43.580652.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_08_28T09_33_43.580652", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-28T09:33:43.580652.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-28T09:33:43.580652.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_08_28T09_33_43.580652", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-28T09:33:43.580652.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-28T09:33:43.580652.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_08_28T09_33_43.580652", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-28T09:33:43.580652.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-28T09:33:43.580652.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_08_28T09_33_43.580652", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-28T09:33:43.580652.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-28T09:33:43.580652.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_08_28T09_33_43.580652", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-28T09:33:43.580652.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-28T09:33:43.580652.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": 
"2023_08_28T09_33_43.580652", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-28T09:33:43.580652.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-28T09:33:43.580652.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_08_28T09_33_43.580652", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-28T09:33:43.580652.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-28T09:33:43.580652.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_08_28T09_33_43.580652", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-28T09:33:43.580652.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-28T09:33:43.580652.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_08_28T09_33_43.580652", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-28T09:33:43.580652.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-28T09:33:43.580652.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_08_28T09_33_43.580652", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-28T09:33:43.580652.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-28T09:33:43.580652.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_08_28T09_33_43.580652", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-28T09:33:43.580652.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-28T09:33:43.580652.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": 
[{"split": "2023_08_28T09_33_43.580652", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-28T09:33:43.580652.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-28T09:33:43.580652.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_08_28T09_33_43.580652", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-28T09:33:43.580652.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-28T09:33:43.580652.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_08_28T09_33_43.580652", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-28T09:33:43.580652.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-28T09:33:43.580652.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_08_28T09_33_43.580652", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-28T09:33:43.580652.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-28T09:33:43.580652.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_08_28T09_33_43.580652", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-28T09:33:43.580652.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-28T09:33:43.580652.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_08_28T09_33_43.580652", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-28T09:33:43.580652.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-28T09:33:43.580652.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_08_28T09_33_43.580652", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-28T09:33:43.580652.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-28T09:33:43.580652.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_08_28T09_33_43.580652", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-28T09:33:43.580652.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-28T09:33:43.580652.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_08_28T09_33_43.580652", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-28T09:33:43.580652.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-28T09:33:43.580652.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_09_17T09_47_36.542667", "path": ["**/details_harness|winogrande|5_2023-09-17T09-47-36.542667.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-09-17T09-47-36.542667.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_08_28T09_33_43.580652", "path": ["results_2023-08-28T09:33:43.580652.parquet"]}, {"split": "2023_09_17T09_47_36.542667", "path": ["results_2023-09-17T09-47-36.542667.parquet"]}, {"split": "latest", "path": ["results_2023-09-17T09-47-36.542667.parquet"]}]}]}
2023-09-17T08:47:49+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of shareAI/CodeLLaMA-chat-13b-Chinese ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model shareAI/CodeLLaMA-chat-13b-Chinese on the Open LLM Leaderboard. The dataset is composed of 64 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-09-17T09:47:36.542667(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
[ "# Dataset Card for Evaluation run of shareAI/CodeLLaMA-chat-13b-Chinese", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model shareAI/CodeLLaMA-chat-13b-Chinese on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-09-17T09:47:36.542667(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of shareAI/CodeLLaMA-chat-13b-Chinese", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model shareAI/CodeLLaMA-chat-13b-Chinese on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-09-17T09:47:36.542667(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 23, 31, 171, 66, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of shareAI/CodeLLaMA-chat-13b-Chinese## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model shareAI/CodeLLaMA-chat-13b-Chinese on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-09-17T09:47:36.542667(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
918de8e6101773f201abaf176b152c7964bc9b5b
asfasf
elior055/asd
[ "region:us" ]
2023-08-28T08:37:15+00:00
{}
2023-08-28T08:37:38+00:00
[]
[]
TAGS #region-us
asfasf
[]
[ "TAGS\n#region-us \n" ]
[ 6 ]
[ "passage: TAGS\n#region-us \n" ]
99e73dacd2dd082b895aa02d1ff856be0da259f1
# Dataset Card for Evaluation run of OpenAssistant/codellama-13b-oasst-sft-v10 ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/OpenAssistant/codellama-13b-oasst-sft-v10 - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [OpenAssistant/codellama-13b-oasst-sft-v10](https://huggingface.co/OpenAssistant/codellama-13b-oasst-sft-v10) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 64 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 4 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_OpenAssistant__codellama-13b-oasst-sft-v10", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-10-15T06:23:43.342371](https://huggingface.co/datasets/open-llm-leaderboard/details_OpenAssistant__codellama-13b-oasst-sft-v10/blob/main/results_2023-10-15T06-23-43.342371.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. 
You find each in the results and the "latest" split for each eval): ```python { "all": { "em": 0.0019924496644295304, "em_stderr": 0.00045666764626669533, "f1": 0.07171875000000016, "f1_stderr": 0.0015908122454952622, "acc": 0.4049487994360847, "acc_stderr": 0.011226667727964289 }, "harness|drop|3": { "em": 0.0019924496644295304, "em_stderr": 0.00045666764626669533, "f1": 0.07171875000000016, "f1_stderr": 0.0015908122454952622 }, "harness|gsm8k|5": { "acc": 0.13191811978771797, "acc_stderr": 0.009321265253857515 }, "harness|winogrande|5": { "acc": 0.6779794790844514, "acc_stderr": 0.013132070202071064 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
open-llm-leaderboard/details_OpenAssistant__codellama-13b-oasst-sft-v10
[ "region:us" ]
2023-08-28T08:43:07+00:00
{"pretty_name": "Evaluation run of OpenAssistant/codellama-13b-oasst-sft-v10", "dataset_summary": "Dataset automatically created during the evaluation run of model [OpenAssistant/codellama-13b-oasst-sft-v10](https://huggingface.co/OpenAssistant/codellama-13b-oasst-sft-v10) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 4 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_OpenAssistant__codellama-13b-oasst-sft-v10\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-10-15T06:23:43.342371](https://huggingface.co/datasets/open-llm-leaderboard/details_OpenAssistant__codellama-13b-oasst-sft-v10/blob/main/results_2023-10-15T06-23-43.342371.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. 
You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.0019924496644295304,\n \"em_stderr\": 0.00045666764626669533,\n \"f1\": 0.07171875000000016,\n \"f1_stderr\": 0.0015908122454952622,\n \"acc\": 0.4049487994360847,\n \"acc_stderr\": 0.011226667727964289\n },\n \"harness|drop|3\": {\n \"em\": 0.0019924496644295304,\n \"em_stderr\": 0.00045666764626669533,\n \"f1\": 0.07171875000000016,\n \"f1_stderr\": 0.0015908122454952622\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.13191811978771797,\n \"acc_stderr\": 0.009321265253857515\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.6779794790844514,\n \"acc_stderr\": 0.013132070202071064\n }\n}\n```", "repo_url": "https://huggingface.co/OpenAssistant/codellama-13b-oasst-sft-v10", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_08_28T09_42_44.871031", "path": ["**/details_harness|arc:challenge|25_2023-08-28T09:42:44.871031.parquet"]}, {"split": "2023_08_28T18_08_08.712288", "path": ["**/details_harness|arc:challenge|25_2023-08-28T18:08:08.712288.parquet"]}, {"split": "2023_09_18T15_15_45.768968", "path": ["**/details_harness|arc:challenge|25_2023-09-18T15-15-45.768968.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-09-18T15-15-45.768968.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_10_15T06_23_43.342371", "path": ["**/details_harness|drop|3_2023-10-15T06-23-43.342371.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-10-15T06-23-43.342371.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_10_15T06_23_43.342371", "path": ["**/details_harness|gsm8k|5_2023-10-15T06-23-43.342371.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-10-15T06-23-43.342371.parquet"]}]}, 
{"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_08_28T09_42_44.871031", "path": ["**/details_harness|hellaswag|10_2023-08-28T09:42:44.871031.parquet"]}, {"split": "2023_08_28T18_08_08.712288", "path": ["**/details_harness|hellaswag|10_2023-08-28T18:08:08.712288.parquet"]}, {"split": "2023_09_18T15_15_45.768968", "path": ["**/details_harness|hellaswag|10_2023-09-18T15-15-45.768968.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-09-18T15-15-45.768968.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_08_28T09_42_44.871031", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-28T09:42:44.871031.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-28T09:42:44.871031.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-28T09:42:44.871031.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-28T09:42:44.871031.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-28T09:42:44.871031.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-28T09:42:44.871031.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-28T09:42:44.871031.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-28T09:42:44.871031.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-28T09:42:44.871031.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-28T09:42:44.871031.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-28T09:42:44.871031.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-28T09:42:44.871031.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-28T09:42:44.871031.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-28T09:42:44.871031.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-28T09:42:44.871031.parquet", 
"**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-28T09:42:44.871031.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-28T09:42:44.871031.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-28T09:42:44.871031.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-08-28T09:42:44.871031.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-28T09:42:44.871031.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-28T09:42:44.871031.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-28T09:42:44.871031.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-28T09:42:44.871031.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-28T09:42:44.871031.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-28T09:42:44.871031.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-28T09:42:44.871031.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-28T09:42:44.871031.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-28T09:42:44.871031.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-28T09:42:44.871031.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-28T09:42:44.871031.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-28T09:42:44.871031.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-28T09:42:44.871031.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-28T09:42:44.871031.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-28T09:42:44.871031.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-28T09:42:44.871031.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-28T09:42:44.871031.parquet", 
"**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-28T09:42:44.871031.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-08-28T09:42:44.871031.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-28T09:42:44.871031.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-28T09:42:44.871031.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-28T09:42:44.871031.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-28T09:42:44.871031.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-28T09:42:44.871031.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-28T09:42:44.871031.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-28T09:42:44.871031.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-28T09:42:44.871031.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-28T09:42:44.871031.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-28T09:42:44.871031.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-28T09:42:44.871031.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-28T09:42:44.871031.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-28T09:42:44.871031.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-28T09:42:44.871031.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-28T09:42:44.871031.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-28T09:42:44.871031.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-28T09:42:44.871031.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-28T09:42:44.871031.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-08-28T09:42:44.871031.parquet"]}, {"split": "2023_08_28T18_08_08.712288", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-28T18:08:08.712288.parquet", 
"**/details_harness|hendrycksTest-anatomy|5_2023-08-28T18:08:08.712288.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-28T18:08:08.712288.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-28T18:08:08.712288.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-28T18:08:08.712288.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-28T18:08:08.712288.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-28T18:08:08.712288.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-28T18:08:08.712288.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-28T18:08:08.712288.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-28T18:08:08.712288.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-28T18:08:08.712288.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-28T18:08:08.712288.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-28T18:08:08.712288.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-28T18:08:08.712288.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-28T18:08:08.712288.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-28T18:08:08.712288.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-28T18:08:08.712288.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-28T18:08:08.712288.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-08-28T18:08:08.712288.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-28T18:08:08.712288.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-28T18:08:08.712288.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-28T18:08:08.712288.parquet", 
"**/details_harness|hendrycksTest-high_school_geography|5_2023-08-28T18:08:08.712288.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-28T18:08:08.712288.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-28T18:08:08.712288.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-28T18:08:08.712288.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-28T18:08:08.712288.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-28T18:08:08.712288.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-28T18:08:08.712288.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-28T18:08:08.712288.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-28T18:08:08.712288.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-28T18:08:08.712288.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-28T18:08:08.712288.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-28T18:08:08.712288.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-28T18:08:08.712288.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-28T18:08:08.712288.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-28T18:08:08.712288.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-08-28T18:08:08.712288.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-28T18:08:08.712288.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-28T18:08:08.712288.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-28T18:08:08.712288.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-28T18:08:08.712288.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-28T18:08:08.712288.parquet", 
"**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-28T18:08:08.712288.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-28T18:08:08.712288.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-28T18:08:08.712288.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-28T18:08:08.712288.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-28T18:08:08.712288.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-28T18:08:08.712288.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-28T18:08:08.712288.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-28T18:08:08.712288.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-28T18:08:08.712288.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-28T18:08:08.712288.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-28T18:08:08.712288.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-28T18:08:08.712288.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-28T18:08:08.712288.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-08-28T18:08:08.712288.parquet"]}, {"split": "2023_09_18T15_15_45.768968", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-09-18T15-15-45.768968.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-09-18T15-15-45.768968.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-09-18T15-15-45.768968.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-09-18T15-15-45.768968.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-09-18T15-15-45.768968.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-09-18T15-15-45.768968.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-09-18T15-15-45.768968.parquet", 
"**/details_harness|hendrycksTest-college_computer_science|5_2023-09-18T15-15-45.768968.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-09-18T15-15-45.768968.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-09-18T15-15-45.768968.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-09-18T15-15-45.768968.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-09-18T15-15-45.768968.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-09-18T15-15-45.768968.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-09-18T15-15-45.768968.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-09-18T15-15-45.768968.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-09-18T15-15-45.768968.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-09-18T15-15-45.768968.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-09-18T15-15-45.768968.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-09-18T15-15-45.768968.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-09-18T15-15-45.768968.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-09-18T15-15-45.768968.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-09-18T15-15-45.768968.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-09-18T15-15-45.768968.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-09-18T15-15-45.768968.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-09-18T15-15-45.768968.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-09-18T15-15-45.768968.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-09-18T15-15-45.768968.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-09-18T15-15-45.768968.parquet", 
"**/details_harness|hendrycksTest-high_school_psychology|5_2023-09-18T15-15-45.768968.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-09-18T15-15-45.768968.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-09-18T15-15-45.768968.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-09-18T15-15-45.768968.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-09-18T15-15-45.768968.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-09-18T15-15-45.768968.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-09-18T15-15-45.768968.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-09-18T15-15-45.768968.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-09-18T15-15-45.768968.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-09-18T15-15-45.768968.parquet", "**/details_harness|hendrycksTest-management|5_2023-09-18T15-15-45.768968.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-09-18T15-15-45.768968.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-09-18T15-15-45.768968.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-09-18T15-15-45.768968.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-09-18T15-15-45.768968.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-09-18T15-15-45.768968.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-09-18T15-15-45.768968.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-09-18T15-15-45.768968.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-09-18T15-15-45.768968.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-09-18T15-15-45.768968.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-09-18T15-15-45.768968.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-09-18T15-15-45.768968.parquet", 
"**/details_harness|hendrycksTest-professional_psychology|5_2023-09-18T15-15-45.768968.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-09-18T15-15-45.768968.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-09-18T15-15-45.768968.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-09-18T15-15-45.768968.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-09-18T15-15-45.768968.parquet", "**/details_harness|hendrycksTest-virology|5_2023-09-18T15-15-45.768968.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-09-18T15-15-45.768968.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-09-18T15-15-45.768968.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-09-18T15-15-45.768968.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-09-18T15-15-45.768968.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-09-18T15-15-45.768968.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-09-18T15-15-45.768968.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-09-18T15-15-45.768968.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-09-18T15-15-45.768968.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-09-18T15-15-45.768968.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-09-18T15-15-45.768968.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-09-18T15-15-45.768968.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-09-18T15-15-45.768968.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-09-18T15-15-45.768968.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-09-18T15-15-45.768968.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-09-18T15-15-45.768968.parquet", 
"**/details_harness|hendrycksTest-electrical_engineering|5_2023-09-18T15-15-45.768968.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-09-18T15-15-45.768968.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-09-18T15-15-45.768968.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-09-18T15-15-45.768968.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-09-18T15-15-45.768968.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-09-18T15-15-45.768968.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-09-18T15-15-45.768968.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-09-18T15-15-45.768968.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-09-18T15-15-45.768968.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-09-18T15-15-45.768968.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-09-18T15-15-45.768968.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-09-18T15-15-45.768968.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-09-18T15-15-45.768968.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-09-18T15-15-45.768968.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-09-18T15-15-45.768968.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-09-18T15-15-45.768968.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-09-18T15-15-45.768968.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-09-18T15-15-45.768968.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-09-18T15-15-45.768968.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-09-18T15-15-45.768968.parquet", 
"**/details_harness|hendrycksTest-international_law|5_2023-09-18T15-15-45.768968.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-09-18T15-15-45.768968.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-09-18T15-15-45.768968.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-09-18T15-15-45.768968.parquet", "**/details_harness|hendrycksTest-management|5_2023-09-18T15-15-45.768968.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-09-18T15-15-45.768968.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-09-18T15-15-45.768968.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-09-18T15-15-45.768968.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-09-18T15-15-45.768968.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-09-18T15-15-45.768968.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-09-18T15-15-45.768968.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-09-18T15-15-45.768968.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-09-18T15-15-45.768968.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-09-18T15-15-45.768968.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-09-18T15-15-45.768968.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-09-18T15-15-45.768968.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-09-18T15-15-45.768968.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-09-18T15-15-45.768968.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-09-18T15-15-45.768968.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-09-18T15-15-45.768968.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-09-18T15-15-45.768968.parquet", "**/details_harness|hendrycksTest-virology|5_2023-09-18T15-15-45.768968.parquet", 
"**/details_harness|hendrycksTest-world_religions|5_2023-09-18T15-15-45.768968.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_08_28T09_42_44.871031", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-28T09:42:44.871031.parquet"]}, {"split": "2023_08_28T18_08_08.712288", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-28T18:08:08.712288.parquet"]}, {"split": "2023_09_18T15_15_45.768968", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-09-18T15-15-45.768968.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-09-18T15-15-45.768968.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_08_28T09_42_44.871031", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-28T09:42:44.871031.parquet"]}, {"split": "2023_08_28T18_08_08.712288", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-28T18:08:08.712288.parquet"]}, {"split": "2023_09_18T15_15_45.768968", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-09-18T15-15-45.768968.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-09-18T15-15-45.768968.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_08_28T09_42_44.871031", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-28T09:42:44.871031.parquet"]}, {"split": "2023_08_28T18_08_08.712288", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-28T18:08:08.712288.parquet"]}, {"split": "2023_09_18T15_15_45.768968", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-09-18T15-15-45.768968.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-09-18T15-15-45.768968.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_08_28T09_42_44.871031", "path": 
["**/details_harness|hendrycksTest-business_ethics|5_2023-08-28T09:42:44.871031.parquet"]}, {"split": "2023_08_28T18_08_08.712288", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-08-28T18:08:08.712288.parquet"]}, {"split": "2023_09_18T15_15_45.768968", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-09-18T15-15-45.768968.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-09-18T15-15-45.768968.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_08_28T09_42_44.871031", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-28T09:42:44.871031.parquet"]}, {"split": "2023_08_28T18_08_08.712288", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-28T18:08:08.712288.parquet"]}, {"split": "2023_09_18T15_15_45.768968", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-09-18T15-15-45.768968.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-09-18T15-15-45.768968.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_08_28T09_42_44.871031", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-28T09:42:44.871031.parquet"]}, {"split": "2023_08_28T18_08_08.712288", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-28T18:08:08.712288.parquet"]}, {"split": "2023_09_18T15_15_45.768968", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-09-18T15-15-45.768968.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-09-18T15-15-45.768968.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_08_28T09_42_44.871031", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-28T09:42:44.871031.parquet"]}, {"split": "2023_08_28T18_08_08.712288", 
"path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-28T18:08:08.712288.parquet"]}, {"split": "2023_09_18T15_15_45.768968", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-09-18T15-15-45.768968.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-09-18T15-15-45.768968.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_08_28T09_42_44.871031", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-28T09:42:44.871031.parquet"]}, {"split": "2023_08_28T18_08_08.712288", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-28T18:08:08.712288.parquet"]}, {"split": "2023_09_18T15_15_45.768968", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-09-18T15-15-45.768968.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-09-18T15-15-45.768968.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_08_28T09_42_44.871031", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-28T09:42:44.871031.parquet"]}, {"split": "2023_08_28T18_08_08.712288", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-28T18:08:08.712288.parquet"]}, {"split": "2023_09_18T15_15_45.768968", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-09-18T15-15-45.768968.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-09-18T15-15-45.768968.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_08_28T09_42_44.871031", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-08-28T09:42:44.871031.parquet"]}, {"split": "2023_08_28T18_08_08.712288", "path": 
["**/details_harness|hendrycksTest-college_medicine|5_2023-08-28T18:08:08.712288.parquet"]}, {"split": "2023_09_18T15_15_45.768968", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-09-18T15-15-45.768968.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-09-18T15-15-45.768968.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_08_28T09_42_44.871031", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-08-28T09:42:44.871031.parquet"]}, {"split": "2023_08_28T18_08_08.712288", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-08-28T18:08:08.712288.parquet"]}, {"split": "2023_09_18T15_15_45.768968", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-09-18T15-15-45.768968.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-09-18T15-15-45.768968.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_08_28T09_42_44.871031", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-28T09:42:44.871031.parquet"]}, {"split": "2023_08_28T18_08_08.712288", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-28T18:08:08.712288.parquet"]}, {"split": "2023_09_18T15_15_45.768968", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-09-18T15-15-45.768968.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-09-18T15-15-45.768968.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_08_28T09_42_44.871031", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-28T09:42:44.871031.parquet"]}, {"split": "2023_08_28T18_08_08.712288", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-28T18:08:08.712288.parquet"]}, {"split": 
"2023_09_18T15_15_45.768968", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-09-18T15-15-45.768968.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-09-18T15-15-45.768968.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_08_28T09_42_44.871031", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-28T09:42:44.871031.parquet"]}, {"split": "2023_08_28T18_08_08.712288", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-28T18:08:08.712288.parquet"]}, {"split": "2023_09_18T15_15_45.768968", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-09-18T15-15-45.768968.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-09-18T15-15-45.768968.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_08_28T09_42_44.871031", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-28T09:42:44.871031.parquet"]}, {"split": "2023_08_28T18_08_08.712288", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-28T18:08:08.712288.parquet"]}, {"split": "2023_09_18T15_15_45.768968", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-09-18T15-15-45.768968.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-09-18T15-15-45.768968.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_08_28T09_42_44.871031", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-28T09:42:44.871031.parquet"]}, {"split": "2023_08_28T18_08_08.712288", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-28T18:08:08.712288.parquet"]}, {"split": "2023_09_18T15_15_45.768968", "path": 
["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-09-18T15-15-45.768968.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-09-18T15-15-45.768968.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_08_28T09_42_44.871031", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-28T09:42:44.871031.parquet"]}, {"split": "2023_08_28T18_08_08.712288", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-28T18:08:08.712288.parquet"]}, {"split": "2023_09_18T15_15_45.768968", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-09-18T15-15-45.768968.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-09-18T15-15-45.768968.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_08_28T09_42_44.871031", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-28T09:42:44.871031.parquet"]}, {"split": "2023_08_28T18_08_08.712288", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-28T18:08:08.712288.parquet"]}, {"split": "2023_09_18T15_15_45.768968", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-09-18T15-15-45.768968.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-09-18T15-15-45.768968.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_08_28T09_42_44.871031", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-28T09:42:44.871031.parquet"]}, {"split": "2023_08_28T18_08_08.712288", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-28T18:08:08.712288.parquet"]}, {"split": "2023_09_18T15_15_45.768968", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-09-18T15-15-45.768968.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_biology|5_2023-09-18T15-15-45.768968.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_08_28T09_42_44.871031", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-28T09:42:44.871031.parquet"]}, {"split": "2023_08_28T18_08_08.712288", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-28T18:08:08.712288.parquet"]}, {"split": "2023_09_18T15_15_45.768968", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-09-18T15-15-45.768968.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-09-18T15-15-45.768968.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_08_28T09_42_44.871031", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-28T09:42:44.871031.parquet"]}, {"split": "2023_08_28T18_08_08.712288", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-28T18:08:08.712288.parquet"]}, {"split": "2023_09_18T15_15_45.768968", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-09-18T15-15-45.768968.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-09-18T15-15-45.768968.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_08_28T09_42_44.871031", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-28T09:42:44.871031.parquet"]}, {"split": "2023_08_28T18_08_08.712288", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-28T18:08:08.712288.parquet"]}, {"split": "2023_09_18T15_15_45.768968", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-09-18T15-15-45.768968.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_european_history|5_2023-09-18T15-15-45.768968.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_08_28T09_42_44.871031", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-28T09:42:44.871031.parquet"]}, {"split": "2023_08_28T18_08_08.712288", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-28T18:08:08.712288.parquet"]}, {"split": "2023_09_18T15_15_45.768968", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-09-18T15-15-45.768968.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-09-18T15-15-45.768968.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_08_28T09_42_44.871031", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-28T09:42:44.871031.parquet"]}, {"split": "2023_08_28T18_08_08.712288", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-28T18:08:08.712288.parquet"]}, {"split": "2023_09_18T15_15_45.768968", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-09-18T15-15-45.768968.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-09-18T15-15-45.768968.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_08_28T09_42_44.871031", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-28T09:42:44.871031.parquet"]}, {"split": "2023_08_28T18_08_08.712288", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-28T18:08:08.712288.parquet"]}, {"split": "2023_09_18T15_15_45.768968", "path": 
["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-09-18T15-15-45.768968.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-09-18T15-15-45.768968.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_08_28T09_42_44.871031", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-28T09:42:44.871031.parquet"]}, {"split": "2023_08_28T18_08_08.712288", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-28T18:08:08.712288.parquet"]}, {"split": "2023_09_18T15_15_45.768968", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-09-18T15-15-45.768968.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-09-18T15-15-45.768968.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_08_28T09_42_44.871031", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-28T09:42:44.871031.parquet"]}, {"split": "2023_08_28T18_08_08.712288", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-28T18:08:08.712288.parquet"]}, {"split": "2023_09_18T15_15_45.768968", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-09-18T15-15-45.768968.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-09-18T15-15-45.768968.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_08_28T09_42_44.871031", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-28T09:42:44.871031.parquet"]}, {"split": "2023_08_28T18_08_08.712288", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-28T18:08:08.712288.parquet"]}, {"split": "2023_09_18T15_15_45.768968", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2023-09-18T15-15-45.768968.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-09-18T15-15-45.768968.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_08_28T09_42_44.871031", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-28T09:42:44.871031.parquet"]}, {"split": "2023_08_28T18_08_08.712288", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-28T18:08:08.712288.parquet"]}, {"split": "2023_09_18T15_15_45.768968", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-09-18T15-15-45.768968.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-09-18T15-15-45.768968.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_08_28T09_42_44.871031", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-28T09:42:44.871031.parquet"]}, {"split": "2023_08_28T18_08_08.712288", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-28T18:08:08.712288.parquet"]}, {"split": "2023_09_18T15_15_45.768968", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-09-18T15-15-45.768968.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-09-18T15-15-45.768968.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_08_28T09_42_44.871031", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-28T09:42:44.871031.parquet"]}, {"split": "2023_08_28T18_08_08.712288", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-28T18:08:08.712288.parquet"]}, {"split": "2023_09_18T15_15_45.768968", "path": 
["**/details_harness|hendrycksTest-high_school_us_history|5_2023-09-18T15-15-45.768968.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-09-18T15-15-45.768968.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_08_28T09_42_44.871031", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-28T09:42:44.871031.parquet"]}, {"split": "2023_08_28T18_08_08.712288", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-28T18:08:08.712288.parquet"]}, {"split": "2023_09_18T15_15_45.768968", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-09-18T15-15-45.768968.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-09-18T15-15-45.768968.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_08_28T09_42_44.871031", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-28T09:42:44.871031.parquet"]}, {"split": "2023_08_28T18_08_08.712288", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-28T18:08:08.712288.parquet"]}, {"split": "2023_09_18T15_15_45.768968", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-09-18T15-15-45.768968.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-09-18T15-15-45.768968.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_08_28T09_42_44.871031", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-28T09:42:44.871031.parquet"]}, {"split": "2023_08_28T18_08_08.712288", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-28T18:08:08.712288.parquet"]}, {"split": "2023_09_18T15_15_45.768968", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-09-18T15-15-45.768968.parquet"]}, {"split": 
"latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-09-18T15-15-45.768968.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_08_28T09_42_44.871031", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-28T09:42:44.871031.parquet"]}, {"split": "2023_08_28T18_08_08.712288", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-28T18:08:08.712288.parquet"]}, {"split": "2023_09_18T15_15_45.768968", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-09-18T15-15-45.768968.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-09-18T15-15-45.768968.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_08_28T09_42_44.871031", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-28T09:42:44.871031.parquet"]}, {"split": "2023_08_28T18_08_08.712288", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-28T18:08:08.712288.parquet"]}, {"split": "2023_09_18T15_15_45.768968", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-09-18T15-15-45.768968.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-09-18T15-15-45.768968.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_08_28T09_42_44.871031", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-28T09:42:44.871031.parquet"]}, {"split": "2023_08_28T18_08_08.712288", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-28T18:08:08.712288.parquet"]}, {"split": "2023_09_18T15_15_45.768968", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-09-18T15-15-45.768968.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-09-18T15-15-45.768968.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_08_28T09_42_44.871031", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-28T09:42:44.871031.parquet"]}, {"split": "2023_08_28T18_08_08.712288", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-28T18:08:08.712288.parquet"]}, {"split": "2023_09_18T15_15_45.768968", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-09-18T15-15-45.768968.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-09-18T15-15-45.768968.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_08_28T09_42_44.871031", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-28T09:42:44.871031.parquet"]}, {"split": "2023_08_28T18_08_08.712288", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-28T18:08:08.712288.parquet"]}, {"split": "2023_09_18T15_15_45.768968", "path": ["**/details_harness|hendrycksTest-management|5_2023-09-18T15-15-45.768968.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-09-18T15-15-45.768968.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_08_28T09_42_44.871031", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-28T09:42:44.871031.parquet"]}, {"split": "2023_08_28T18_08_08.712288", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-28T18:08:08.712288.parquet"]}, {"split": "2023_09_18T15_15_45.768968", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-09-18T15-15-45.768968.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-09-18T15-15-45.768968.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_08_28T09_42_44.871031", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-28T09:42:44.871031.parquet"]}, 
{"split": "2023_08_28T18_08_08.712288", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-28T18:08:08.712288.parquet"]}, {"split": "2023_09_18T15_15_45.768968", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-09-18T15-15-45.768968.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-09-18T15-15-45.768968.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_08_28T09_42_44.871031", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-28T09:42:44.871031.parquet"]}, {"split": "2023_08_28T18_08_08.712288", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-28T18:08:08.712288.parquet"]}, {"split": "2023_09_18T15_15_45.768968", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-09-18T15-15-45.768968.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-09-18T15-15-45.768968.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_08_28T09_42_44.871031", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-28T09:42:44.871031.parquet"]}, {"split": "2023_08_28T18_08_08.712288", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-28T18:08:08.712288.parquet"]}, {"split": "2023_09_18T15_15_45.768968", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-09-18T15-15-45.768968.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-09-18T15-15-45.768968.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_08_28T09_42_44.871031", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-28T09:42:44.871031.parquet"]}, {"split": "2023_08_28T18_08_08.712288", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-28T18:08:08.712288.parquet"]}, {"split": 
"2023_09_18T15_15_45.768968", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-09-18T15-15-45.768968.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-09-18T15-15-45.768968.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_08_28T09_42_44.871031", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-28T09:42:44.871031.parquet"]}, {"split": "2023_08_28T18_08_08.712288", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-28T18:08:08.712288.parquet"]}, {"split": "2023_09_18T15_15_45.768968", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-09-18T15-15-45.768968.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-09-18T15-15-45.768968.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_08_28T09_42_44.871031", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-28T09:42:44.871031.parquet"]}, {"split": "2023_08_28T18_08_08.712288", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-28T18:08:08.712288.parquet"]}, {"split": "2023_09_18T15_15_45.768968", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-09-18T15-15-45.768968.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-09-18T15-15-45.768968.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_08_28T09_42_44.871031", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-28T09:42:44.871031.parquet"]}, {"split": "2023_08_28T18_08_08.712288", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-28T18:08:08.712288.parquet"]}, {"split": "2023_09_18T15_15_45.768968", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-09-18T15-15-45.768968.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-prehistory|5_2023-09-18T15-15-45.768968.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_08_28T09_42_44.871031", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-28T09:42:44.871031.parquet"]}, {"split": "2023_08_28T18_08_08.712288", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-28T18:08:08.712288.parquet"]}, {"split": "2023_09_18T15_15_45.768968", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-09-18T15-15-45.768968.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-09-18T15-15-45.768968.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_08_28T09_42_44.871031", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-28T09:42:44.871031.parquet"]}, {"split": "2023_08_28T18_08_08.712288", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-28T18:08:08.712288.parquet"]}, {"split": "2023_09_18T15_15_45.768968", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-09-18T15-15-45.768968.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-09-18T15-15-45.768968.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_08_28T09_42_44.871031", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-28T09:42:44.871031.parquet"]}, {"split": "2023_08_28T18_08_08.712288", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-28T18:08:08.712288.parquet"]}, {"split": "2023_09_18T15_15_45.768968", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-09-18T15-15-45.768968.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_medicine|5_2023-09-18T15-15-45.768968.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_08_28T09_42_44.871031", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-28T09:42:44.871031.parquet"]}, {"split": "2023_08_28T18_08_08.712288", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-28T18:08:08.712288.parquet"]}, {"split": "2023_09_18T15_15_45.768968", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-09-18T15-15-45.768968.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-09-18T15-15-45.768968.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_08_28T09_42_44.871031", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-28T09:42:44.871031.parquet"]}, {"split": "2023_08_28T18_08_08.712288", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-28T18:08:08.712288.parquet"]}, {"split": "2023_09_18T15_15_45.768968", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-09-18T15-15-45.768968.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-09-18T15-15-45.768968.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_08_28T09_42_44.871031", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-28T09:42:44.871031.parquet"]}, {"split": "2023_08_28T18_08_08.712288", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-28T18:08:08.712288.parquet"]}, {"split": "2023_09_18T15_15_45.768968", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-09-18T15-15-45.768968.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-09-18T15-15-45.768968.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_08_28T09_42_44.871031", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-28T09:42:44.871031.parquet"]}, {"split": "2023_08_28T18_08_08.712288", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-28T18:08:08.712288.parquet"]}, {"split": "2023_09_18T15_15_45.768968", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-09-18T15-15-45.768968.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-09-18T15-15-45.768968.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_08_28T09_42_44.871031", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-28T09:42:44.871031.parquet"]}, {"split": "2023_08_28T18_08_08.712288", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-28T18:08:08.712288.parquet"]}, {"split": "2023_09_18T15_15_45.768968", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-09-18T15-15-45.768968.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-09-18T15-15-45.768968.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_08_28T09_42_44.871031", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-28T09:42:44.871031.parquet"]}, {"split": "2023_08_28T18_08_08.712288", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-28T18:08:08.712288.parquet"]}, {"split": "2023_09_18T15_15_45.768968", "path": ["**/details_harness|hendrycksTest-virology|5_2023-09-18T15-15-45.768968.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-09-18T15-15-45.768968.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_08_28T09_42_44.871031", "path": 
["**/details_harness|hendrycksTest-world_religions|5_2023-08-28T09:42:44.871031.parquet"]}, {"split": "2023_08_28T18_08_08.712288", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-28T18:08:08.712288.parquet"]}, {"split": "2023_09_18T15_15_45.768968", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-09-18T15-15-45.768968.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-09-18T15-15-45.768968.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_08_28T09_42_44.871031", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-28T09:42:44.871031.parquet"]}, {"split": "2023_08_28T18_08_08.712288", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-28T18:08:08.712288.parquet"]}, {"split": "2023_09_18T15_15_45.768968", "path": ["**/details_harness|truthfulqa:mc|0_2023-09-18T15-15-45.768968.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-09-18T15-15-45.768968.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_10_15T06_23_43.342371", "path": ["**/details_harness|winogrande|5_2023-10-15T06-23-43.342371.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-10-15T06-23-43.342371.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_08_28T09_42_44.871031", "path": ["results_2023-08-28T09:42:44.871031.parquet"]}, {"split": "2023_08_28T18_08_08.712288", "path": ["results_2023-08-28T18:08:08.712288.parquet"]}, {"split": "2023_09_18T15_15_45.768968", "path": ["results_2023-09-18T15-15-45.768968.parquet"]}, {"split": "2023_10_15T06_23_43.342371", "path": ["results_2023-10-15T06-23-43.342371.parquet"]}, {"split": "latest", "path": ["results_2023-10-15T06-23-43.342371.parquet"]}]}]}
2023-10-15T05:23:56+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of OpenAssistant/codellama-13b-oasst-sft-v10 ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model OpenAssistant/codellama-13b-oasst-sft-v10 on the Open LLM Leaderboard. The dataset is composed of 64 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 4 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-10-15T06:23:43.342371(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
[ "# Dataset Card for Evaluation run of OpenAssistant/codellama-13b-oasst-sft-v10", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model OpenAssistant/codellama-13b-oasst-sft-v10 on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 4 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-15T06:23:43.342371(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of OpenAssistant/codellama-13b-oasst-sft-v10", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model OpenAssistant/codellama-13b-oasst-sft-v10 on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 4 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-15T06:23:43.342371(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 29, 31, 177, 66, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of OpenAssistant/codellama-13b-oasst-sft-v10## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model OpenAssistant/codellama-13b-oasst-sft-v10 on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 4 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-10-15T06:23:43.342371(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
ac9983712d42021adddde8584a74bf7498938d43
# Dataset Card for Evaluation run of bigcode/starcoderplus ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/bigcode/starcoderplus - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [bigcode/starcoderplus](https://huggingface.co/bigcode/starcoderplus) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 64 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 3 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_bigcode__starcoderplus", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-10-15T01:45:50.036434](https://huggingface.co/datasets/open-llm-leaderboard/details_bigcode__starcoderplus/blob/main/results_2023-10-15T01-45-50.036434.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. 
You find each in the results and the "latest" split for each eval): ```python { "all": { "em": 0.0012583892617449664, "em_stderr": 0.00036305608931191014, "f1": 0.05428062080536913, "f1_stderr": 0.0012821278013514389, "acc": 0.39022141932642523, "acc_stderr": 0.010183303049937573 }, "harness|drop|3": { "em": 0.0012583892617449664, "em_stderr": 0.00036305608931191014, "f1": 0.05428062080536913, "f1_stderr": 0.0012821278013514389 }, "harness|gsm8k|5": { "acc": 0.0803639120545868, "acc_stderr": 0.007488258573239077 }, "harness|winogrande|5": { "acc": 0.7000789265982637, "acc_stderr": 0.01287834752663607 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
open-llm-leaderboard/details_bigcode__starcoderplus
[ "region:us" ]
2023-08-28T08:43:38+00:00
{"pretty_name": "Evaluation run of bigcode/starcoderplus", "dataset_summary": "Dataset automatically created during the evaluation run of model [bigcode/starcoderplus](https://huggingface.co/bigcode/starcoderplus) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 3 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_bigcode__starcoderplus\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-10-15T01:45:50.036434](https://huggingface.co/datasets/open-llm-leaderboard/details_bigcode__starcoderplus/blob/main/results_2023-10-15T01-45-50.036434.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. 
You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.0012583892617449664,\n \"em_stderr\": 0.00036305608931191014,\n \"f1\": 0.05428062080536913,\n \"f1_stderr\": 0.0012821278013514389,\n \"acc\": 0.39022141932642523,\n \"acc_stderr\": 0.010183303049937573\n },\n \"harness|drop|3\": {\n \"em\": 0.0012583892617449664,\n \"em_stderr\": 0.00036305608931191014,\n \"f1\": 0.05428062080536913,\n \"f1_stderr\": 0.0012821278013514389\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.0803639120545868,\n \"acc_stderr\": 0.007488258573239077\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7000789265982637,\n \"acc_stderr\": 0.01287834752663607\n }\n}\n```", "repo_url": "https://huggingface.co/bigcode/starcoderplus", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_08_28T09_43_16.279088", "path": ["**/details_harness|arc:challenge|25_2023-08-28T09:43:16.279088.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-08-28T09:43:16.279088.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_09_23T10_57_53.936866", "path": ["**/details_harness|drop|3_2023-09-23T10-57-53.936866.parquet"]}, {"split": "2023_10_15T01_45_50.036434", "path": ["**/details_harness|drop|3_2023-10-15T01-45-50.036434.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-10-15T01-45-50.036434.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_09_23T10_57_53.936866", "path": ["**/details_harness|gsm8k|5_2023-09-23T10-57-53.936866.parquet"]}, {"split": "2023_10_15T01_45_50.036434", "path": ["**/details_harness|gsm8k|5_2023-10-15T01-45-50.036434.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-10-15T01-45-50.036434.parquet"]}]}, {"config_name": "harness_hellaswag_10", 
"data_files": [{"split": "2023_08_28T09_43_16.279088", "path": ["**/details_harness|hellaswag|10_2023-08-28T09:43:16.279088.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-08-28T09:43:16.279088.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_08_28T09_43_16.279088", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-28T09:43:16.279088.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-28T09:43:16.279088.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-28T09:43:16.279088.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-28T09:43:16.279088.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-28T09:43:16.279088.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-28T09:43:16.279088.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-28T09:43:16.279088.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-28T09:43:16.279088.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-28T09:43:16.279088.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-28T09:43:16.279088.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-28T09:43:16.279088.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-28T09:43:16.279088.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-28T09:43:16.279088.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-28T09:43:16.279088.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-28T09:43:16.279088.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-28T09:43:16.279088.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-28T09:43:16.279088.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-28T09:43:16.279088.parquet", 
"**/details_harness|hendrycksTest-high_school_biology|5_2023-08-28T09:43:16.279088.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-28T09:43:16.279088.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-28T09:43:16.279088.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-28T09:43:16.279088.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-28T09:43:16.279088.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-28T09:43:16.279088.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-28T09:43:16.279088.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-28T09:43:16.279088.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-28T09:43:16.279088.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-28T09:43:16.279088.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-28T09:43:16.279088.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-28T09:43:16.279088.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-28T09:43:16.279088.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-28T09:43:16.279088.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-28T09:43:16.279088.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-28T09:43:16.279088.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-28T09:43:16.279088.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-28T09:43:16.279088.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-28T09:43:16.279088.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-08-28T09:43:16.279088.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-28T09:43:16.279088.parquet", 
"**/details_harness|hendrycksTest-marketing|5_2023-08-28T09:43:16.279088.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-28T09:43:16.279088.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-28T09:43:16.279088.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-28T09:43:16.279088.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-28T09:43:16.279088.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-28T09:43:16.279088.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-28T09:43:16.279088.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-28T09:43:16.279088.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-28T09:43:16.279088.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-28T09:43:16.279088.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-28T09:43:16.279088.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-28T09:43:16.279088.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-28T09:43:16.279088.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-28T09:43:16.279088.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-28T09:43:16.279088.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-28T09:43:16.279088.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-28T09:43:16.279088.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-08-28T09:43:16.279088.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-28T09:43:16.279088.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-28T09:43:16.279088.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-28T09:43:16.279088.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-28T09:43:16.279088.parquet", 
"**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-28T09:43:16.279088.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-28T09:43:16.279088.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-28T09:43:16.279088.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-28T09:43:16.279088.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-28T09:43:16.279088.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-28T09:43:16.279088.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-28T09:43:16.279088.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-28T09:43:16.279088.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-28T09:43:16.279088.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-28T09:43:16.279088.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-28T09:43:16.279088.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-28T09:43:16.279088.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-28T09:43:16.279088.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-28T09:43:16.279088.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-08-28T09:43:16.279088.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-28T09:43:16.279088.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-28T09:43:16.279088.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-28T09:43:16.279088.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-28T09:43:16.279088.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-28T09:43:16.279088.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-28T09:43:16.279088.parquet", 
"**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-28T09:43:16.279088.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-28T09:43:16.279088.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-28T09:43:16.279088.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-28T09:43:16.279088.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-28T09:43:16.279088.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-28T09:43:16.279088.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-28T09:43:16.279088.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-28T09:43:16.279088.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-28T09:43:16.279088.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-28T09:43:16.279088.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-28T09:43:16.279088.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-28T09:43:16.279088.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-08-28T09:43:16.279088.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-28T09:43:16.279088.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-28T09:43:16.279088.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-28T09:43:16.279088.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-28T09:43:16.279088.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-28T09:43:16.279088.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-28T09:43:16.279088.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-28T09:43:16.279088.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-28T09:43:16.279088.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-28T09:43:16.279088.parquet", 
"**/details_harness|hendrycksTest-professional_accounting|5_2023-08-28T09:43:16.279088.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-28T09:43:16.279088.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-28T09:43:16.279088.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-28T09:43:16.279088.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-28T09:43:16.279088.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-28T09:43:16.279088.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-28T09:43:16.279088.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-28T09:43:16.279088.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-28T09:43:16.279088.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-08-28T09:43:16.279088.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_08_28T09_43_16.279088", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-28T09:43:16.279088.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-28T09:43:16.279088.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_08_28T09_43_16.279088", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-28T09:43:16.279088.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-28T09:43:16.279088.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_08_28T09_43_16.279088", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-28T09:43:16.279088.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-28T09:43:16.279088.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_08_28T09_43_16.279088", "path": 
["**/details_harness|hendrycksTest-business_ethics|5_2023-08-28T09:43:16.279088.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-08-28T09:43:16.279088.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_08_28T09_43_16.279088", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-28T09:43:16.279088.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-28T09:43:16.279088.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_08_28T09_43_16.279088", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-28T09:43:16.279088.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-28T09:43:16.279088.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_08_28T09_43_16.279088", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-28T09:43:16.279088.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-28T09:43:16.279088.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_08_28T09_43_16.279088", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-28T09:43:16.279088.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-28T09:43:16.279088.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_08_28T09_43_16.279088", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-28T09:43:16.279088.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-28T09:43:16.279088.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_08_28T09_43_16.279088", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-08-28T09:43:16.279088.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-08-28T09:43:16.279088.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_08_28T09_43_16.279088", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-08-28T09:43:16.279088.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-08-28T09:43:16.279088.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_08_28T09_43_16.279088", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-28T09:43:16.279088.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-28T09:43:16.279088.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_08_28T09_43_16.279088", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-28T09:43:16.279088.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-28T09:43:16.279088.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_08_28T09_43_16.279088", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-28T09:43:16.279088.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-28T09:43:16.279088.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_08_28T09_43_16.279088", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-28T09:43:16.279088.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-28T09:43:16.279088.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_08_28T09_43_16.279088", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-28T09:43:16.279088.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-28T09:43:16.279088.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_08_28T09_43_16.279088", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-28T09:43:16.279088.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-28T09:43:16.279088.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_08_28T09_43_16.279088", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-28T09:43:16.279088.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-28T09:43:16.279088.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_08_28T09_43_16.279088", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-28T09:43:16.279088.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-28T09:43:16.279088.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_08_28T09_43_16.279088", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-28T09:43:16.279088.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-28T09:43:16.279088.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_08_28T09_43_16.279088", "path": 
["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-28T09:43:16.279088.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-28T09:43:16.279088.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_08_28T09_43_16.279088", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-28T09:43:16.279088.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-28T09:43:16.279088.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_08_28T09_43_16.279088", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-28T09:43:16.279088.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-28T09:43:16.279088.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_08_28T09_43_16.279088", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-28T09:43:16.279088.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-28T09:43:16.279088.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_08_28T09_43_16.279088", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-28T09:43:16.279088.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-28T09:43:16.279088.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_08_28T09_43_16.279088", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-28T09:43:16.279088.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-28T09:43:16.279088.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_08_28T09_43_16.279088", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-28T09:43:16.279088.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-28T09:43:16.279088.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_08_28T09_43_16.279088", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-28T09:43:16.279088.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-28T09:43:16.279088.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_08_28T09_43_16.279088", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-28T09:43:16.279088.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-28T09:43:16.279088.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_08_28T09_43_16.279088", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-28T09:43:16.279088.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-28T09:43:16.279088.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_08_28T09_43_16.279088", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-28T09:43:16.279088.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-28T09:43:16.279088.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": 
"2023_08_28T09_43_16.279088", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-28T09:43:16.279088.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-28T09:43:16.279088.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_08_28T09_43_16.279088", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-28T09:43:16.279088.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-28T09:43:16.279088.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_08_28T09_43_16.279088", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-28T09:43:16.279088.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-28T09:43:16.279088.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_08_28T09_43_16.279088", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-28T09:43:16.279088.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-28T09:43:16.279088.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_08_28T09_43_16.279088", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-28T09:43:16.279088.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-28T09:43:16.279088.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_08_28T09_43_16.279088", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-28T09:43:16.279088.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-28T09:43:16.279088.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_08_28T09_43_16.279088", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-28T09:43:16.279088.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-28T09:43:16.279088.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_08_28T09_43_16.279088", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-28T09:43:16.279088.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-28T09:43:16.279088.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_08_28T09_43_16.279088", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-28T09:43:16.279088.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-28T09:43:16.279088.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_08_28T09_43_16.279088", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-28T09:43:16.279088.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-28T09:43:16.279088.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_08_28T09_43_16.279088", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-28T09:43:16.279088.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-28T09:43:16.279088.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_08_28T09_43_16.279088", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-28T09:43:16.279088.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-28T09:43:16.279088.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_08_28T09_43_16.279088", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-28T09:43:16.279088.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-28T09:43:16.279088.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_08_28T09_43_16.279088", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-28T09:43:16.279088.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-28T09:43:16.279088.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_08_28T09_43_16.279088", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-28T09:43:16.279088.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-28T09:43:16.279088.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_08_28T09_43_16.279088", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-28T09:43:16.279088.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-28T09:43:16.279088.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_08_28T09_43_16.279088", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-28T09:43:16.279088.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-28T09:43:16.279088.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_08_28T09_43_16.279088", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-28T09:43:16.279088.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-28T09:43:16.279088.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_08_28T09_43_16.279088", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-28T09:43:16.279088.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-28T09:43:16.279088.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_08_28T09_43_16.279088", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-28T09:43:16.279088.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-28T09:43:16.279088.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_08_28T09_43_16.279088", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-28T09:43:16.279088.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-28T09:43:16.279088.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_08_28T09_43_16.279088", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-28T09:43:16.279088.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-28T09:43:16.279088.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_08_28T09_43_16.279088", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-28T09:43:16.279088.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-28T09:43:16.279088.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_08_28T09_43_16.279088", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-28T09:43:16.279088.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-28T09:43:16.279088.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_08_28T09_43_16.279088", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-28T09:43:16.279088.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-28T09:43:16.279088.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_08_28T09_43_16.279088", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-28T09:43:16.279088.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-28T09:43:16.279088.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_08_28T09_43_16.279088", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-28T09:43:16.279088.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-28T09:43:16.279088.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_09_23T10_57_53.936866", "path": ["**/details_harness|winogrande|5_2023-09-23T10-57-53.936866.parquet"]}, {"split": "2023_10_15T01_45_50.036434", "path": ["**/details_harness|winogrande|5_2023-10-15T01-45-50.036434.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-10-15T01-45-50.036434.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_08_28T09_43_16.279088", "path": ["results_2023-08-28T09:43:16.279088.parquet"]}, {"split": "2023_09_23T10_57_53.936866", "path": ["results_2023-09-23T10-57-53.936866.parquet"]}, {"split": "2023_10_15T01_45_50.036434", "path": ["results_2023-10-15T01-45-50.036434.parquet"]}, {"split": "latest", "path": ["results_2023-10-15T01-45-50.036434.parquet"]}]}]}
2023-10-15T00:46:06+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of bigcode/starcoderplus ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model bigcode/starcoderplus on the Open LLM Leaderboard. The dataset is composed of 64 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 3 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-10-15T01:45:50.036434(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
[ "# Dataset Card for Evaluation run of bigcode/starcoderplus", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model bigcode/starcoderplus on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 3 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-15T01:45:50.036434(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of bigcode/starcoderplus", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model bigcode/starcoderplus on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 3 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-15T01:45:50.036434(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 16, 31, 164, 67, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of bigcode/starcoderplus## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model bigcode/starcoderplus on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 3 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-10-15T01:45:50.036434(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
bec74ab65d5f22acea08d56e1655f4c2f19f5058
# Dataset Card for "reviewssyntheticdata100" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
Dikshaa-malhotra/reviewssyntheticdata100
[ "region:us" ]
2023-08-28T08:49:33+00:00
{"dataset_info": {"features": [{"name": "product", "dtype": "string"}, {"name": "description", "dtype": "string"}, {"name": "critique", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 125175, "num_examples": 100}], "download_size": 90149, "dataset_size": 125175}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]}
2023-08-28T08:49:36+00:00
[]
[]
TAGS #region-us
# Dataset Card for "reviewssyntheticdata100" More Information needed
[ "# Dataset Card for \"reviewssyntheticdata100\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"reviewssyntheticdata100\"\n\nMore Information needed" ]
[ 6, 17 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"reviewssyntheticdata100\"\n\nMore Information needed" ]
591622e62e822d43c0f6aac63413a3bf684a0561
# Dataset Card for "face-partuv2beautifulluv-controluv-ffhq10-samples" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
kasvii/face-partuv2beautifulluv-controluv-ffhq10-samples
[ "region:us" ]
2023-08-28T08:53:37+00:00
{"dataset_info": {"features": [{"name": "original_image", "dtype": "image"}, {"name": "edit_prompt", "dtype": "string"}, {"name": "edited_image", "dtype": "image"}, {"name": "control_image", "dtype": "image"}], "splits": [{"name": "train", "num_bytes": 7315417.0, "num_examples": 10}], "download_size": 4408703, "dataset_size": 7315417.0}}
2023-08-28T08:53:43+00:00
[]
[]
TAGS #region-us
# Dataset Card for "face-partuv2beautifulluv-controluv-ffhq10-samples" More Information needed
[ "# Dataset Card for \"face-partuv2beautifulluv-controluv-ffhq10-samples\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"face-partuv2beautifulluv-controluv-ffhq10-samples\"\n\nMore Information needed" ]
[ 6, 30 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"face-partuv2beautifulluv-controluv-ffhq10-samples\"\n\nMore Information needed" ]
58d5c955c0d7778927f42ae5544cd490724070ba
# Dataset Card for Evaluation run of bigcode/starcoder <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [bigcode/starcoder](https://huggingface.co/bigcode/starcoder) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 121 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 4 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_bigcode__starcoder", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-02-14T22:50:56.838467](https://huggingface.co/datasets/open-llm-leaderboard/details_bigcode__starcoder/blob/main/results_2024-02-14T22-50-56.838467.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. 
You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.2969189890806991, "acc_stderr": 0.03236365511067932, "acc_norm": 0.2979650690177265, "acc_norm_stderr": 0.033097159757475146, "mc1": 0.25091799265605874, "mc1_stderr": 0.015176985027707689, "mc2": 0.4130412207453783, "mc2_stderr": 0.014976467041499917 }, "harness|arc:challenge|25": { "acc": 0.28071672354948807, "acc_stderr": 0.013131238126975574, "acc_norm": 0.302901023890785, "acc_norm_stderr": 0.013428241573185349 }, "harness|hellaswag|10": { "acc": 0.37860983867755427, "acc_stderr": 0.004840493603166207, "acc_norm": 0.4787890858394742, "acc_norm_stderr": 0.004985289555586536 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.23, "acc_stderr": 0.042295258468165044, "acc_norm": 0.23, "acc_norm_stderr": 0.042295258468165044 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.3037037037037037, "acc_stderr": 0.039725528847851375, "acc_norm": 0.3037037037037037, "acc_norm_stderr": 0.039725528847851375 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.2894736842105263, "acc_stderr": 0.036906779861372814, "acc_norm": 0.2894736842105263, "acc_norm_stderr": 0.036906779861372814 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.36, "acc_stderr": 0.048241815132442176, "acc_norm": 0.36, "acc_norm_stderr": 0.048241815132442176 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.25660377358490566, "acc_stderr": 0.02688064788905197, "acc_norm": 0.25660377358490566, "acc_norm_stderr": 0.02688064788905197 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.2986111111111111, "acc_stderr": 0.03827052357950756, "acc_norm": 0.2986111111111111, "acc_norm_stderr": 0.03827052357950756 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.24, "acc_stderr": 0.04292346959909283, "acc_norm": 0.24, "acc_norm_stderr": 0.04292346959909283 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.32, "acc_stderr": 0.04688261722621505, "acc_norm": 0.32, 
"acc_norm_stderr": 0.04688261722621505 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.34, "acc_stderr": 0.04760952285695235, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695235 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.23121387283236994, "acc_stderr": 0.032147373020294696, "acc_norm": 0.23121387283236994, "acc_norm_stderr": 0.032147373020294696 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.21568627450980393, "acc_stderr": 0.04092563958237654, "acc_norm": 0.21568627450980393, "acc_norm_stderr": 0.04092563958237654 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.51, "acc_stderr": 0.05024183937956911, "acc_norm": 0.51, "acc_norm_stderr": 0.05024183937956911 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.3021276595744681, "acc_stderr": 0.030017554471880554, "acc_norm": 0.3021276595744681, "acc_norm_stderr": 0.030017554471880554 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.2982456140350877, "acc_stderr": 0.04303684033537315, "acc_norm": 0.2982456140350877, "acc_norm_stderr": 0.04303684033537315 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.43448275862068964, "acc_stderr": 0.041307408795554966, "acc_norm": 0.43448275862068964, "acc_norm_stderr": 0.041307408795554966 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.2328042328042328, "acc_stderr": 0.02176596167215453, "acc_norm": 0.2328042328042328, "acc_norm_stderr": 0.02176596167215453 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.2857142857142857, "acc_stderr": 0.0404061017820884, "acc_norm": 0.2857142857142857, "acc_norm_stderr": 0.0404061017820884 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.32, "acc_stderr": 0.046882617226215034, "acc_norm": 0.32, "acc_norm_stderr": 0.046882617226215034 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.24193548387096775, "acc_stderr": 0.024362599693031076, "acc_norm": 0.24193548387096775, "acc_norm_stderr": 0.024362599693031076 }, 
"harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.21674876847290642, "acc_stderr": 0.028990331252516235, "acc_norm": 0.21674876847290642, "acc_norm_stderr": 0.028990331252516235 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.39, "acc_stderr": 0.04902071300001975, "acc_norm": 0.39, "acc_norm_stderr": 0.04902071300001975 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.3696969696969697, "acc_stderr": 0.03769430314512568, "acc_norm": 0.3696969696969697, "acc_norm_stderr": 0.03769430314512568 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.19696969696969696, "acc_stderr": 0.02833560973246335, "acc_norm": 0.19696969696969696, "acc_norm_stderr": 0.02833560973246335 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.24352331606217617, "acc_stderr": 0.030975436386845426, "acc_norm": 0.24352331606217617, "acc_norm_stderr": 0.030975436386845426 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.24615384615384617, "acc_stderr": 0.021840866990423088, "acc_norm": 0.24615384615384617, "acc_norm_stderr": 0.021840866990423088 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.2740740740740741, "acc_stderr": 0.027195934804085626, "acc_norm": 0.2740740740740741, "acc_norm_stderr": 0.027195934804085626 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.2605042016806723, "acc_stderr": 0.028510251512341937, "acc_norm": 0.2605042016806723, "acc_norm_stderr": 0.028510251512341937 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.2185430463576159, "acc_stderr": 0.033742355504256936, "acc_norm": 0.2185430463576159, "acc_norm_stderr": 0.033742355504256936 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.21284403669724772, "acc_stderr": 0.01754937638931369, "acc_norm": 0.21284403669724772, "acc_norm_stderr": 0.01754937638931369 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.17592592592592593, "acc_stderr": 
0.025967420958258533, "acc_norm": 0.17592592592592593, "acc_norm_stderr": 0.025967420958258533 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.25980392156862747, "acc_stderr": 0.030778554678693268, "acc_norm": 0.25980392156862747, "acc_norm_stderr": 0.030778554678693268 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.3080168776371308, "acc_stderr": 0.0300523893356057, "acc_norm": 0.3080168776371308, "acc_norm_stderr": 0.0300523893356057 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.34977578475336324, "acc_stderr": 0.03200736719484503, "acc_norm": 0.34977578475336324, "acc_norm_stderr": 0.03200736719484503 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.3053435114503817, "acc_stderr": 0.040393149787245605, "acc_norm": 0.3053435114503817, "acc_norm_stderr": 0.040393149787245605 }, "harness|hendrycksTest-international_law|5": { "acc": 0.39669421487603307, "acc_stderr": 0.044658697805310094, "acc_norm": 0.39669421487603307, "acc_norm_stderr": 0.044658697805310094 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.25925925925925924, "acc_stderr": 0.04236511258094632, "acc_norm": 0.25925925925925924, "acc_norm_stderr": 0.04236511258094632 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.2392638036809816, "acc_stderr": 0.03351953879521269, "acc_norm": 0.2392638036809816, "acc_norm_stderr": 0.03351953879521269 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.3125, "acc_stderr": 0.043994650575715215, "acc_norm": 0.3125, "acc_norm_stderr": 0.043994650575715215 }, "harness|hendrycksTest-management|5": { "acc": 0.24271844660194175, "acc_stderr": 0.04245022486384495, "acc_norm": 0.24271844660194175, "acc_norm_stderr": 0.04245022486384495 }, "harness|hendrycksTest-marketing|5": { "acc": 0.4017094017094017, "acc_stderr": 0.03211693751051622, "acc_norm": 0.4017094017094017, "acc_norm_stderr": 0.03211693751051622 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.33, "acc_stderr": 0.04725815626252604, 
"acc_norm": 0.33, "acc_norm_stderr": 0.04725815626252604 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.3052362707535121, "acc_stderr": 0.016467711947635112, "acc_norm": 0.3052362707535121, "acc_norm_stderr": 0.016467711947635112 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.36127167630057805, "acc_stderr": 0.025862201852277895, "acc_norm": 0.36127167630057805, "acc_norm_stderr": 0.025862201852277895 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.2435754189944134, "acc_stderr": 0.014355911964767864, "acc_norm": 0.2435754189944134, "acc_norm_stderr": 0.014355911964767864 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.3006535947712418, "acc_stderr": 0.026256053835718968, "acc_norm": 0.3006535947712418, "acc_norm_stderr": 0.026256053835718968 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.33762057877813506, "acc_stderr": 0.026858825879488547, "acc_norm": 0.33762057877813506, "acc_norm_stderr": 0.026858825879488547 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.31790123456790126, "acc_stderr": 0.02591006352824088, "acc_norm": 0.31790123456790126, "acc_norm_stderr": 0.02591006352824088 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.2695035460992908, "acc_stderr": 0.026469036818590624, "acc_norm": 0.2695035460992908, "acc_norm_stderr": 0.026469036818590624 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.2757496740547588, "acc_stderr": 0.011413813609161, "acc_norm": 0.2757496740547588, "acc_norm_stderr": 0.011413813609161 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.20588235294117646, "acc_stderr": 0.02456220431414232, "acc_norm": 0.20588235294117646, "acc_norm_stderr": 0.02456220431414232 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.3088235294117647, "acc_stderr": 0.018690850273595273, "acc_norm": 0.3088235294117647, "acc_norm_stderr": 0.018690850273595273 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.3181818181818182, "acc_stderr": 
0.044612721759105085, "acc_norm": 0.3181818181818182, "acc_norm_stderr": 0.044612721759105085 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.24897959183673468, "acc_stderr": 0.02768297952296023, "acc_norm": 0.24897959183673468, "acc_norm_stderr": 0.02768297952296023 }, "harness|hendrycksTest-sociology|5": { "acc": 0.34328358208955223, "acc_stderr": 0.03357379665433431, "acc_norm": 0.34328358208955223, "acc_norm_stderr": 0.03357379665433431 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.4, "acc_stderr": 0.04923659639173309, "acc_norm": 0.4, "acc_norm_stderr": 0.04923659639173309 }, "harness|hendrycksTest-virology|5": { "acc": 0.3192771084337349, "acc_stderr": 0.03629335329947861, "acc_norm": 0.3192771084337349, "acc_norm_stderr": 0.03629335329947861 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.3216374269005848, "acc_stderr": 0.03582529442573122, "acc_norm": 0.3216374269005848, "acc_norm_stderr": 0.03582529442573122 }, "harness|truthfulqa:mc|0": { "mc1": 0.25091799265605874, "mc1_stderr": 0.015176985027707689, "mc2": 0.4130412207453783, "mc2_stderr": 0.014976467041499917 }, "harness|winogrande|5": { "acc": 0.5627466456195738, "acc_stderr": 0.013941393310695917 }, "harness|gsm8k|5": { "acc": 0.09173616376042457, "acc_stderr": 0.007950942148339347 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. 
--> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. 
--> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
open-llm-leaderboard/details_bigcode__starcoder
[ "region:us" ]
2023-08-28T08:54:21+00:00
{"pretty_name": "Evaluation run of bigcode/starcoder", "dataset_summary": "Dataset automatically created during the evaluation run of model [bigcode/starcoder](https://huggingface.co/bigcode/starcoder) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 121 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 4 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_bigcode__starcoder\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-02-14T22:50:56.838467](https://huggingface.co/datasets/open-llm-leaderboard/details_bigcode__starcoder/blob/main/results_2024-02-14T22-50-56.838467.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. 
You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.2969189890806991,\n \"acc_stderr\": 0.03236365511067932,\n \"acc_norm\": 0.2979650690177265,\n \"acc_norm_stderr\": 0.033097159757475146,\n \"mc1\": 0.25091799265605874,\n \"mc1_stderr\": 0.015176985027707689,\n \"mc2\": 0.4130412207453783,\n \"mc2_stderr\": 0.014976467041499917\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.28071672354948807,\n \"acc_stderr\": 0.013131238126975574,\n \"acc_norm\": 0.302901023890785,\n \"acc_norm_stderr\": 0.013428241573185349\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.37860983867755427,\n \"acc_stderr\": 0.004840493603166207,\n \"acc_norm\": 0.4787890858394742,\n \"acc_norm_stderr\": 0.004985289555586536\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.23,\n \"acc_stderr\": 0.042295258468165044,\n \"acc_norm\": 0.23,\n \"acc_norm_stderr\": 0.042295258468165044\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.3037037037037037,\n \"acc_stderr\": 0.039725528847851375,\n \"acc_norm\": 0.3037037037037037,\n \"acc_norm_stderr\": 0.039725528847851375\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.2894736842105263,\n \"acc_stderr\": 0.036906779861372814,\n \"acc_norm\": 0.2894736842105263,\n \"acc_norm_stderr\": 0.036906779861372814\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.36,\n \"acc_stderr\": 0.048241815132442176,\n \"acc_norm\": 0.36,\n \"acc_norm_stderr\": 0.048241815132442176\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.25660377358490566,\n \"acc_stderr\": 0.02688064788905197,\n \"acc_norm\": 0.25660377358490566,\n \"acc_norm_stderr\": 0.02688064788905197\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.2986111111111111,\n \"acc_stderr\": 0.03827052357950756,\n \"acc_norm\": 0.2986111111111111,\n \"acc_norm_stderr\": 0.03827052357950756\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 
0.24,\n \"acc_stderr\": 0.04292346959909283,\n \"acc_norm\": 0.24,\n \"acc_norm_stderr\": 0.04292346959909283\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.32,\n \"acc_stderr\": 0.04688261722621505,\n \"acc_norm\": 0.32,\n \"acc_norm_stderr\": 0.04688261722621505\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.34,\n \"acc_stderr\": 0.04760952285695235,\n \"acc_norm\": 0.34,\n \"acc_norm_stderr\": 0.04760952285695235\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.23121387283236994,\n \"acc_stderr\": 0.032147373020294696,\n \"acc_norm\": 0.23121387283236994,\n \"acc_norm_stderr\": 0.032147373020294696\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.21568627450980393,\n \"acc_stderr\": 0.04092563958237654,\n \"acc_norm\": 0.21568627450980393,\n \"acc_norm_stderr\": 0.04092563958237654\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.51,\n \"acc_stderr\": 0.05024183937956911,\n \"acc_norm\": 0.51,\n \"acc_norm_stderr\": 0.05024183937956911\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.3021276595744681,\n \"acc_stderr\": 0.030017554471880554,\n \"acc_norm\": 0.3021276595744681,\n \"acc_norm_stderr\": 0.030017554471880554\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.2982456140350877,\n \"acc_stderr\": 0.04303684033537315,\n \"acc_norm\": 0.2982456140350877,\n \"acc_norm_stderr\": 0.04303684033537315\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.43448275862068964,\n \"acc_stderr\": 0.041307408795554966,\n \"acc_norm\": 0.43448275862068964,\n \"acc_norm_stderr\": 0.041307408795554966\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.2328042328042328,\n \"acc_stderr\": 0.02176596167215453,\n \"acc_norm\": 0.2328042328042328,\n \"acc_norm_stderr\": 0.02176596167215453\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.2857142857142857,\n \"acc_stderr\": 
0.0404061017820884,\n \"acc_norm\": 0.2857142857142857,\n \"acc_norm_stderr\": 0.0404061017820884\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.32,\n \"acc_stderr\": 0.046882617226215034,\n \"acc_norm\": 0.32,\n \"acc_norm_stderr\": 0.046882617226215034\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.24193548387096775,\n \"acc_stderr\": 0.024362599693031076,\n \"acc_norm\": 0.24193548387096775,\n \"acc_norm_stderr\": 0.024362599693031076\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.21674876847290642,\n \"acc_stderr\": 0.028990331252516235,\n \"acc_norm\": 0.21674876847290642,\n \"acc_norm_stderr\": 0.028990331252516235\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.39,\n \"acc_stderr\": 0.04902071300001975,\n \"acc_norm\": 0.39,\n \"acc_norm_stderr\": 0.04902071300001975\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.3696969696969697,\n \"acc_stderr\": 0.03769430314512568,\n \"acc_norm\": 0.3696969696969697,\n \"acc_norm_stderr\": 0.03769430314512568\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.19696969696969696,\n \"acc_stderr\": 0.02833560973246335,\n \"acc_norm\": 0.19696969696969696,\n \"acc_norm_stderr\": 0.02833560973246335\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.24352331606217617,\n \"acc_stderr\": 0.030975436386845426,\n \"acc_norm\": 0.24352331606217617,\n \"acc_norm_stderr\": 0.030975436386845426\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.24615384615384617,\n \"acc_stderr\": 0.021840866990423088,\n \"acc_norm\": 0.24615384615384617,\n \"acc_norm_stderr\": 0.021840866990423088\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.2740740740740741,\n \"acc_stderr\": 0.027195934804085626,\n \"acc_norm\": 0.2740740740740741,\n \"acc_norm_stderr\": 0.027195934804085626\n },\n 
\"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.2605042016806723,\n \"acc_stderr\": 0.028510251512341937,\n \"acc_norm\": 0.2605042016806723,\n \"acc_norm_stderr\": 0.028510251512341937\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.2185430463576159,\n \"acc_stderr\": 0.033742355504256936,\n \"acc_norm\": 0.2185430463576159,\n \"acc_norm_stderr\": 0.033742355504256936\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.21284403669724772,\n \"acc_stderr\": 0.01754937638931369,\n \"acc_norm\": 0.21284403669724772,\n \"acc_norm_stderr\": 0.01754937638931369\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.17592592592592593,\n \"acc_stderr\": 0.025967420958258533,\n \"acc_norm\": 0.17592592592592593,\n \"acc_norm_stderr\": 0.025967420958258533\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.25980392156862747,\n \"acc_stderr\": 0.030778554678693268,\n \"acc_norm\": 0.25980392156862747,\n \"acc_norm_stderr\": 0.030778554678693268\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.3080168776371308,\n \"acc_stderr\": 0.0300523893356057,\n \"acc_norm\": 0.3080168776371308,\n \"acc_norm_stderr\": 0.0300523893356057\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.34977578475336324,\n \"acc_stderr\": 0.03200736719484503,\n \"acc_norm\": 0.34977578475336324,\n \"acc_norm_stderr\": 0.03200736719484503\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.3053435114503817,\n \"acc_stderr\": 0.040393149787245605,\n \"acc_norm\": 0.3053435114503817,\n \"acc_norm_stderr\": 0.040393149787245605\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.39669421487603307,\n \"acc_stderr\": 0.044658697805310094,\n \"acc_norm\": 0.39669421487603307,\n \"acc_norm_stderr\": 0.044658697805310094\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.25925925925925924,\n \"acc_stderr\": 
0.04236511258094632,\n \"acc_norm\": 0.25925925925925924,\n \"acc_norm_stderr\": 0.04236511258094632\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.2392638036809816,\n \"acc_stderr\": 0.03351953879521269,\n \"acc_norm\": 0.2392638036809816,\n \"acc_norm_stderr\": 0.03351953879521269\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.3125,\n \"acc_stderr\": 0.043994650575715215,\n \"acc_norm\": 0.3125,\n \"acc_norm_stderr\": 0.043994650575715215\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.24271844660194175,\n \"acc_stderr\": 0.04245022486384495,\n \"acc_norm\": 0.24271844660194175,\n \"acc_norm_stderr\": 0.04245022486384495\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.4017094017094017,\n \"acc_stderr\": 0.03211693751051622,\n \"acc_norm\": 0.4017094017094017,\n \"acc_norm_stderr\": 0.03211693751051622\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.33,\n \"acc_stderr\": 0.04725815626252604,\n \"acc_norm\": 0.33,\n \"acc_norm_stderr\": 0.04725815626252604\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.3052362707535121,\n \"acc_stderr\": 0.016467711947635112,\n \"acc_norm\": 0.3052362707535121,\n \"acc_norm_stderr\": 0.016467711947635112\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.36127167630057805,\n \"acc_stderr\": 0.025862201852277895,\n \"acc_norm\": 0.36127167630057805,\n \"acc_norm_stderr\": 0.025862201852277895\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.2435754189944134,\n \"acc_stderr\": 0.014355911964767864,\n \"acc_norm\": 0.2435754189944134,\n \"acc_norm_stderr\": 0.014355911964767864\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.3006535947712418,\n \"acc_stderr\": 0.026256053835718968,\n \"acc_norm\": 0.3006535947712418,\n \"acc_norm_stderr\": 0.026256053835718968\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.33762057877813506,\n \"acc_stderr\": 0.026858825879488547,\n 
\"acc_norm\": 0.33762057877813506,\n \"acc_norm_stderr\": 0.026858825879488547\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.31790123456790126,\n \"acc_stderr\": 0.02591006352824088,\n \"acc_norm\": 0.31790123456790126,\n \"acc_norm_stderr\": 0.02591006352824088\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.2695035460992908,\n \"acc_stderr\": 0.026469036818590624,\n \"acc_norm\": 0.2695035460992908,\n \"acc_norm_stderr\": 0.026469036818590624\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.2757496740547588,\n \"acc_stderr\": 0.011413813609161,\n \"acc_norm\": 0.2757496740547588,\n \"acc_norm_stderr\": 0.011413813609161\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.20588235294117646,\n \"acc_stderr\": 0.02456220431414232,\n \"acc_norm\": 0.20588235294117646,\n \"acc_norm_stderr\": 0.02456220431414232\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.3088235294117647,\n \"acc_stderr\": 0.018690850273595273,\n \"acc_norm\": 0.3088235294117647,\n \"acc_norm_stderr\": 0.018690850273595273\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.3181818181818182,\n \"acc_stderr\": 0.044612721759105085,\n \"acc_norm\": 0.3181818181818182,\n \"acc_norm_stderr\": 0.044612721759105085\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.24897959183673468,\n \"acc_stderr\": 0.02768297952296023,\n \"acc_norm\": 0.24897959183673468,\n \"acc_norm_stderr\": 0.02768297952296023\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.34328358208955223,\n \"acc_stderr\": 0.03357379665433431,\n \"acc_norm\": 0.34328358208955223,\n \"acc_norm_stderr\": 0.03357379665433431\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.4,\n \"acc_stderr\": 0.04923659639173309,\n \"acc_norm\": 0.4,\n \"acc_norm_stderr\": 0.04923659639173309\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.3192771084337349,\n \"acc_stderr\": 
0.03629335329947861,\n \"acc_norm\": 0.3192771084337349,\n \"acc_norm_stderr\": 0.03629335329947861\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.3216374269005848,\n \"acc_stderr\": 0.03582529442573122,\n \"acc_norm\": 0.3216374269005848,\n \"acc_norm_stderr\": 0.03582529442573122\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.25091799265605874,\n \"mc1_stderr\": 0.015176985027707689,\n \"mc2\": 0.4130412207453783,\n \"mc2_stderr\": 0.014976467041499917\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.5627466456195738,\n \"acc_stderr\": 0.013941393310695917\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.09173616376042457,\n \"acc_stderr\": 0.007950942148339347\n }\n}\n```", "repo_url": "https://huggingface.co/bigcode/starcoder", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_08_28T09_53_59.312863", "path": ["**/details_harness|arc:challenge|25_2023-08-28T09:53:59.312863.parquet"]}, {"split": "2024_02_14T22_50_56.838467", "path": ["**/details_harness|arc:challenge|25_2024-02-14T22-50-56.838467.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-02-14T22-50-56.838467.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_02_14T22_50_56.838467", "path": ["**/details_harness|gsm8k|5_2024-02-14T22-50-56.838467.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-02-14T22-50-56.838467.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_08_28T09_53_59.312863", "path": ["**/details_harness|hellaswag|10_2023-08-28T09:53:59.312863.parquet"]}, {"split": "2024_02_14T22_50_56.838467", "path": ["**/details_harness|hellaswag|10_2024-02-14T22-50-56.838467.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-02-14T22-50-56.838467.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_5", "data_files": [{"split": "2023_08_28T09_53_59.312863", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-28T09:53:59.312863.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-28T09:53:59.312863.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-28T09:53:59.312863.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-28T09:53:59.312863.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-28T09:53:59.312863.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-28T09:53:59.312863.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-28T09:53:59.312863.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-28T09:53:59.312863.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-28T09:53:59.312863.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-28T09:53:59.312863.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-28T09:53:59.312863.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-28T09:53:59.312863.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-28T09:53:59.312863.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-28T09:53:59.312863.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-28T09:53:59.312863.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-28T09:53:59.312863.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-28T09:53:59.312863.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-28T09:53:59.312863.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-08-28T09:53:59.312863.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-28T09:53:59.312863.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-28T09:53:59.312863.parquet", 
"**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-28T09:53:59.312863.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-28T09:53:59.312863.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-28T09:53:59.312863.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-28T09:53:59.312863.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-28T09:53:59.312863.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-28T09:53:59.312863.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-28T09:53:59.312863.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-28T09:53:59.312863.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-28T09:53:59.312863.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-28T09:53:59.312863.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-28T09:53:59.312863.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-28T09:53:59.312863.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-28T09:53:59.312863.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-28T09:53:59.312863.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-28T09:53:59.312863.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-28T09:53:59.312863.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-08-28T09:53:59.312863.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-28T09:53:59.312863.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-28T09:53:59.312863.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-28T09:53:59.312863.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-28T09:53:59.312863.parquet", 
"**/details_harness|hendrycksTest-moral_disputes|5_2023-08-28T09:53:59.312863.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-28T09:53:59.312863.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-28T09:53:59.312863.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-28T09:53:59.312863.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-28T09:53:59.312863.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-28T09:53:59.312863.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-28T09:53:59.312863.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-28T09:53:59.312863.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-28T09:53:59.312863.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-28T09:53:59.312863.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-28T09:53:59.312863.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-28T09:53:59.312863.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-28T09:53:59.312863.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-28T09:53:59.312863.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-08-28T09:53:59.312863.parquet"]}, {"split": "2024_02_14T22_50_56.838467", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-14T22-50-56.838467.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-14T22-50-56.838467.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-14T22-50-56.838467.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-14T22-50-56.838467.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-14T22-50-56.838467.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-14T22-50-56.838467.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-02-14T22-50-56.838467.parquet", 
"**/details_harness|hendrycksTest-college_computer_science|5_2024-02-14T22-50-56.838467.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-02-14T22-50-56.838467.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-14T22-50-56.838467.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-14T22-50-56.838467.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-14T22-50-56.838467.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-14T22-50-56.838467.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-14T22-50-56.838467.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-14T22-50-56.838467.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-14T22-50-56.838467.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-14T22-50-56.838467.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-14T22-50-56.838467.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-14T22-50-56.838467.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-14T22-50-56.838467.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-14T22-50-56.838467.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-14T22-50-56.838467.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-14T22-50-56.838467.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-14T22-50-56.838467.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-14T22-50-56.838467.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-14T22-50-56.838467.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-14T22-50-56.838467.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-14T22-50-56.838467.parquet", 
"**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-14T22-50-56.838467.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-14T22-50-56.838467.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-14T22-50-56.838467.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-14T22-50-56.838467.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-14T22-50-56.838467.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-14T22-50-56.838467.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-14T22-50-56.838467.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-14T22-50-56.838467.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-14T22-50-56.838467.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-14T22-50-56.838467.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-14T22-50-56.838467.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-14T22-50-56.838467.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-14T22-50-56.838467.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-14T22-50-56.838467.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-14T22-50-56.838467.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-14T22-50-56.838467.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-02-14T22-50-56.838467.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-02-14T22-50-56.838467.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-02-14T22-50-56.838467.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-14T22-50-56.838467.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-14T22-50-56.838467.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-14T22-50-56.838467.parquet", 
"**/details_harness|hendrycksTest-professional_psychology|5_2024-02-14T22-50-56.838467.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-14T22-50-56.838467.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-14T22-50-56.838467.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-14T22-50-56.838467.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-14T22-50-56.838467.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-14T22-50-56.838467.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-14T22-50-56.838467.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-14T22-50-56.838467.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-14T22-50-56.838467.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-14T22-50-56.838467.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-14T22-50-56.838467.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-14T22-50-56.838467.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-14T22-50-56.838467.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-02-14T22-50-56.838467.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-02-14T22-50-56.838467.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-02-14T22-50-56.838467.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-14T22-50-56.838467.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-14T22-50-56.838467.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-14T22-50-56.838467.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-14T22-50-56.838467.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-14T22-50-56.838467.parquet", 
"**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-14T22-50-56.838467.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-14T22-50-56.838467.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-14T22-50-56.838467.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-14T22-50-56.838467.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-14T22-50-56.838467.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-14T22-50-56.838467.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-14T22-50-56.838467.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-14T22-50-56.838467.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-14T22-50-56.838467.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-14T22-50-56.838467.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-14T22-50-56.838467.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-14T22-50-56.838467.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-14T22-50-56.838467.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-14T22-50-56.838467.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-14T22-50-56.838467.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-14T22-50-56.838467.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-14T22-50-56.838467.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-14T22-50-56.838467.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-14T22-50-56.838467.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-14T22-50-56.838467.parquet", 
"**/details_harness|hendrycksTest-international_law|5_2024-02-14T22-50-56.838467.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-14T22-50-56.838467.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-14T22-50-56.838467.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-14T22-50-56.838467.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-14T22-50-56.838467.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-14T22-50-56.838467.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-14T22-50-56.838467.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-14T22-50-56.838467.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-14T22-50-56.838467.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-14T22-50-56.838467.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-02-14T22-50-56.838467.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-02-14T22-50-56.838467.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-02-14T22-50-56.838467.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-14T22-50-56.838467.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-14T22-50-56.838467.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-14T22-50-56.838467.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-14T22-50-56.838467.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-14T22-50-56.838467.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-14T22-50-56.838467.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-14T22-50-56.838467.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-14T22-50-56.838467.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-14T22-50-56.838467.parquet", 
"**/details_harness|hendrycksTest-world_religions|5_2024-02-14T22-50-56.838467.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_08_28T09_53_59.312863", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-28T09:53:59.312863.parquet"]}, {"split": "2024_02_14T22_50_56.838467", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-14T22-50-56.838467.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-14T22-50-56.838467.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_08_28T09_53_59.312863", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-28T09:53:59.312863.parquet"]}, {"split": "2024_02_14T22_50_56.838467", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-14T22-50-56.838467.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-14T22-50-56.838467.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_08_28T09_53_59.312863", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-28T09:53:59.312863.parquet"]}, {"split": "2024_02_14T22_50_56.838467", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-14T22-50-56.838467.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-14T22-50-56.838467.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_08_28T09_53_59.312863", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-08-28T09:53:59.312863.parquet"]}, {"split": "2024_02_14T22_50_56.838467", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-14T22-50-56.838467.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-14T22-50-56.838467.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_08_28T09_53_59.312863", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-28T09:53:59.312863.parquet"]}, {"split": "2024_02_14T22_50_56.838467", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-14T22-50-56.838467.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-14T22-50-56.838467.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_08_28T09_53_59.312863", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-28T09:53:59.312863.parquet"]}, {"split": "2024_02_14T22_50_56.838467", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-14T22-50-56.838467.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-14T22-50-56.838467.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_08_28T09_53_59.312863", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-28T09:53:59.312863.parquet"]}, {"split": "2024_02_14T22_50_56.838467", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-14T22-50-56.838467.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-14T22-50-56.838467.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_08_28T09_53_59.312863", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-28T09:53:59.312863.parquet"]}, {"split": "2024_02_14T22_50_56.838467", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-14T22-50-56.838467.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-14T22-50-56.838467.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_08_28T09_53_59.312863", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-28T09:53:59.312863.parquet"]}, {"split": "2024_02_14T22_50_56.838467", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-14T22-50-56.838467.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-14T22-50-56.838467.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_08_28T09_53_59.312863", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-08-28T09:53:59.312863.parquet"]}, {"split": "2024_02_14T22_50_56.838467", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-14T22-50-56.838467.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-14T22-50-56.838467.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_08_28T09_53_59.312863", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-08-28T09:53:59.312863.parquet"]}, {"split": "2024_02_14T22_50_56.838467", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-14T22-50-56.838467.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-14T22-50-56.838467.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_08_28T09_53_59.312863", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-28T09:53:59.312863.parquet"]}, {"split": "2024_02_14T22_50_56.838467", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-14T22-50-56.838467.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-14T22-50-56.838467.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": 
[{"split": "2023_08_28T09_53_59.312863", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-28T09:53:59.312863.parquet"]}, {"split": "2024_02_14T22_50_56.838467", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-14T22-50-56.838467.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-14T22-50-56.838467.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_08_28T09_53_59.312863", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-28T09:53:59.312863.parquet"]}, {"split": "2024_02_14T22_50_56.838467", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-14T22-50-56.838467.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-14T22-50-56.838467.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_08_28T09_53_59.312863", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-28T09:53:59.312863.parquet"]}, {"split": "2024_02_14T22_50_56.838467", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-14T22-50-56.838467.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-14T22-50-56.838467.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_08_28T09_53_59.312863", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-28T09:53:59.312863.parquet"]}, {"split": "2024_02_14T22_50_56.838467", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-14T22-50-56.838467.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-14T22-50-56.838467.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_08_28T09_53_59.312863", 
"path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-28T09:53:59.312863.parquet"]}, {"split": "2024_02_14T22_50_56.838467", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-14T22-50-56.838467.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-14T22-50-56.838467.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_08_28T09_53_59.312863", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-28T09:53:59.312863.parquet"]}, {"split": "2024_02_14T22_50_56.838467", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-02-14T22-50-56.838467.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-02-14T22-50-56.838467.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_08_28T09_53_59.312863", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-28T09:53:59.312863.parquet"]}, {"split": "2024_02_14T22_50_56.838467", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-14T22-50-56.838467.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-14T22-50-56.838467.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_08_28T09_53_59.312863", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-28T09:53:59.312863.parquet"]}, {"split": "2024_02_14T22_50_56.838467", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-14T22-50-56.838467.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-14T22-50-56.838467.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_08_28T09_53_59.312863", "path": 
["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-28T09:53:59.312863.parquet"]}, {"split": "2024_02_14T22_50_56.838467", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-14T22-50-56.838467.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-14T22-50-56.838467.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_08_28T09_53_59.312863", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-28T09:53:59.312863.parquet"]}, {"split": "2024_02_14T22_50_56.838467", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-14T22-50-56.838467.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-14T22-50-56.838467.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_08_28T09_53_59.312863", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-28T09:53:59.312863.parquet"]}, {"split": "2024_02_14T22_50_56.838467", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-14T22-50-56.838467.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-14T22-50-56.838467.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_08_28T09_53_59.312863", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-28T09:53:59.312863.parquet"]}, {"split": "2024_02_14T22_50_56.838467", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-14T22-50-56.838467.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-14T22-50-56.838467.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_08_28T09_53_59.312863", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-28T09:53:59.312863.parquet"]}, {"split": "2024_02_14T22_50_56.838467", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-14T22-50-56.838467.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-14T22-50-56.838467.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_08_28T09_53_59.312863", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-28T09:53:59.312863.parquet"]}, {"split": "2024_02_14T22_50_56.838467", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-14T22-50-56.838467.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-14T22-50-56.838467.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_08_28T09_53_59.312863", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-28T09:53:59.312863.parquet"]}, {"split": "2024_02_14T22_50_56.838467", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-14T22-50-56.838467.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-14T22-50-56.838467.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_08_28T09_53_59.312863", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-28T09:53:59.312863.parquet"]}, {"split": "2024_02_14T22_50_56.838467", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-14T22-50-56.838467.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-14T22-50-56.838467.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_08_28T09_53_59.312863", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-28T09:53:59.312863.parquet"]}, {"split": "2024_02_14T22_50_56.838467", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-14T22-50-56.838467.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-14T22-50-56.838467.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_08_28T09_53_59.312863", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-28T09:53:59.312863.parquet"]}, {"split": "2024_02_14T22_50_56.838467", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-14T22-50-56.838467.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-14T22-50-56.838467.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_08_28T09_53_59.312863", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-28T09:53:59.312863.parquet"]}, {"split": "2024_02_14T22_50_56.838467", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-14T22-50-56.838467.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-14T22-50-56.838467.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_08_28T09_53_59.312863", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-28T09:53:59.312863.parquet"]}, {"split": "2024_02_14T22_50_56.838467", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-14T22-50-56.838467.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-14T22-50-56.838467.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_08_28T09_53_59.312863", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-28T09:53:59.312863.parquet"]}, {"split": "2024_02_14T22_50_56.838467", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-14T22-50-56.838467.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-14T22-50-56.838467.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_08_28T09_53_59.312863", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-28T09:53:59.312863.parquet"]}, {"split": "2024_02_14T22_50_56.838467", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-14T22-50-56.838467.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-14T22-50-56.838467.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_08_28T09_53_59.312863", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-28T09:53:59.312863.parquet"]}, {"split": "2024_02_14T22_50_56.838467", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-14T22-50-56.838467.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-14T22-50-56.838467.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_08_28T09_53_59.312863", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-28T09:53:59.312863.parquet"]}, {"split": "2024_02_14T22_50_56.838467", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-14T22-50-56.838467.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-14T22-50-56.838467.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_08_28T09_53_59.312863", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-28T09:53:59.312863.parquet"]}, {"split": "2024_02_14T22_50_56.838467", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-14T22-50-56.838467.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-14T22-50-56.838467.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_08_28T09_53_59.312863", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-28T09:53:59.312863.parquet"]}, {"split": "2024_02_14T22_50_56.838467", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-14T22-50-56.838467.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-14T22-50-56.838467.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_08_28T09_53_59.312863", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-28T09:53:59.312863.parquet"]}, {"split": "2024_02_14T22_50_56.838467", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-14T22-50-56.838467.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-14T22-50-56.838467.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_08_28T09_53_59.312863", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-28T09:53:59.312863.parquet"]}, {"split": "2024_02_14T22_50_56.838467", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-14T22-50-56.838467.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-14T22-50-56.838467.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_08_28T09_53_59.312863", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-28T09:53:59.312863.parquet"]}, {"split": "2024_02_14T22_50_56.838467", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-14T22-50-56.838467.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-14T22-50-56.838467.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_08_28T09_53_59.312863", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-28T09:53:59.312863.parquet"]}, {"split": "2024_02_14T22_50_56.838467", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-14T22-50-56.838467.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-14T22-50-56.838467.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_08_28T09_53_59.312863", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-28T09:53:59.312863.parquet"]}, {"split": "2024_02_14T22_50_56.838467", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-14T22-50-56.838467.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-14T22-50-56.838467.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_08_28T09_53_59.312863", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-28T09:53:59.312863.parquet"]}, {"split": "2024_02_14T22_50_56.838467", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-14T22-50-56.838467.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-14T22-50-56.838467.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_08_28T09_53_59.312863", "path": 
["**/details_harness|hendrycksTest-nutrition|5_2023-08-28T09:53:59.312863.parquet"]}, {"split": "2024_02_14T22_50_56.838467", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-14T22-50-56.838467.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-14T22-50-56.838467.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_08_28T09_53_59.312863", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-28T09:53:59.312863.parquet"]}, {"split": "2024_02_14T22_50_56.838467", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-14T22-50-56.838467.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-14T22-50-56.838467.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_08_28T09_53_59.312863", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-28T09:53:59.312863.parquet"]}, {"split": "2024_02_14T22_50_56.838467", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-14T22-50-56.838467.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-14T22-50-56.838467.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_08_28T09_53_59.312863", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-28T09:53:59.312863.parquet"]}, {"split": "2024_02_14T22_50_56.838467", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-14T22-50-56.838467.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-14T22-50-56.838467.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_08_28T09_53_59.312863", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-28T09:53:59.312863.parquet"]}, {"split": 
"2024_02_14T22_50_56.838467", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-02-14T22-50-56.838467.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-02-14T22-50-56.838467.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_08_28T09_53_59.312863", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-28T09:53:59.312863.parquet"]}, {"split": "2024_02_14T22_50_56.838467", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-14T22-50-56.838467.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-14T22-50-56.838467.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_08_28T09_53_59.312863", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-28T09:53:59.312863.parquet"]}, {"split": "2024_02_14T22_50_56.838467", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-14T22-50-56.838467.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-14T22-50-56.838467.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_08_28T09_53_59.312863", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-28T09:53:59.312863.parquet"]}, {"split": "2024_02_14T22_50_56.838467", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-14T22-50-56.838467.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-14T22-50-56.838467.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_08_28T09_53_59.312863", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-28T09:53:59.312863.parquet"]}, {"split": "2024_02_14T22_50_56.838467", 
"path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-14T22-50-56.838467.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-14T22-50-56.838467.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_08_28T09_53_59.312863", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-28T09:53:59.312863.parquet"]}, {"split": "2024_02_14T22_50_56.838467", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-14T22-50-56.838467.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-14T22-50-56.838467.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_08_28T09_53_59.312863", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-28T09:53:59.312863.parquet"]}, {"split": "2024_02_14T22_50_56.838467", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-14T22-50-56.838467.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-14T22-50-56.838467.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_08_28T09_53_59.312863", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-28T09:53:59.312863.parquet"]}, {"split": "2024_02_14T22_50_56.838467", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-14T22-50-56.838467.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-14T22-50-56.838467.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_08_28T09_53_59.312863", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-28T09:53:59.312863.parquet"]}, {"split": "2024_02_14T22_50_56.838467", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-14T22-50-56.838467.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-world_religions|5_2024-02-14T22-50-56.838467.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_08_28T09_53_59.312863", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-28T09:53:59.312863.parquet"]}, {"split": "2024_02_14T22_50_56.838467", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-14T22-50-56.838467.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-14T22-50-56.838467.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_02_14T22_50_56.838467", "path": ["**/details_harness|winogrande|5_2024-02-14T22-50-56.838467.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-02-14T22-50-56.838467.parquet"]}]}, {"config_name": "original_mmlu_5", "data_files": [{"split": "2023_08_28T21_17_20.453695", "path": ["**/details_original|mmlu:abstract_algebra|5_2023-08-28T21:17:20.453695.parquet", "**/details_original|mmlu:anatomy|5_2023-08-28T21:17:20.453695.parquet", "**/details_original|mmlu:astronomy|5_2023-08-28T21:17:20.453695.parquet", "**/details_original|mmlu:business_ethics|5_2023-08-28T21:17:20.453695.parquet", "**/details_original|mmlu:clinical_knowledge|5_2023-08-28T21:17:20.453695.parquet", "**/details_original|mmlu:college_biology|5_2023-08-28T21:17:20.453695.parquet", "**/details_original|mmlu:college_chemistry|5_2023-08-28T21:17:20.453695.parquet", "**/details_original|mmlu:college_computer_science|5_2023-08-28T21:17:20.453695.parquet", "**/details_original|mmlu:college_mathematics|5_2023-08-28T21:17:20.453695.parquet", "**/details_original|mmlu:college_medicine|5_2023-08-28T21:17:20.453695.parquet", "**/details_original|mmlu:college_physics|5_2023-08-28T21:17:20.453695.parquet", "**/details_original|mmlu:computer_security|5_2023-08-28T21:17:20.453695.parquet", "**/details_original|mmlu:conceptual_physics|5_2023-08-28T21:17:20.453695.parquet", 
"**/details_original|mmlu:econometrics|5_2023-08-28T21:17:20.453695.parquet", "**/details_original|mmlu:electrical_engineering|5_2023-08-28T21:17:20.453695.parquet", "**/details_original|mmlu:elementary_mathematics|5_2023-08-28T21:17:20.453695.parquet", "**/details_original|mmlu:formal_logic|5_2023-08-28T21:17:20.453695.parquet", "**/details_original|mmlu:global_facts|5_2023-08-28T21:17:20.453695.parquet", "**/details_original|mmlu:high_school_biology|5_2023-08-28T21:17:20.453695.parquet", "**/details_original|mmlu:high_school_chemistry|5_2023-08-28T21:17:20.453695.parquet", "**/details_original|mmlu:high_school_computer_science|5_2023-08-28T21:17:20.453695.parquet", "**/details_original|mmlu:high_school_european_history|5_2023-08-28T21:17:20.453695.parquet", "**/details_original|mmlu:high_school_geography|5_2023-08-28T21:17:20.453695.parquet", "**/details_original|mmlu:high_school_government_and_politics|5_2023-08-28T21:17:20.453695.parquet", "**/details_original|mmlu:high_school_macroeconomics|5_2023-08-28T21:17:20.453695.parquet", "**/details_original|mmlu:high_school_mathematics|5_2023-08-28T21:17:20.453695.parquet", "**/details_original|mmlu:high_school_microeconomics|5_2023-08-28T21:17:20.453695.parquet", "**/details_original|mmlu:high_school_physics|5_2023-08-28T21:17:20.453695.parquet", "**/details_original|mmlu:high_school_psychology|5_2023-08-28T21:17:20.453695.parquet", "**/details_original|mmlu:high_school_statistics|5_2023-08-28T21:17:20.453695.parquet", "**/details_original|mmlu:high_school_us_history|5_2023-08-28T21:17:20.453695.parquet", "**/details_original|mmlu:high_school_world_history|5_2023-08-28T21:17:20.453695.parquet", "**/details_original|mmlu:human_aging|5_2023-08-28T21:17:20.453695.parquet", "**/details_original|mmlu:human_sexuality|5_2023-08-28T21:17:20.453695.parquet", "**/details_original|mmlu:international_law|5_2023-08-28T21:17:20.453695.parquet", "**/details_original|mmlu:jurisprudence|5_2023-08-28T21:17:20.453695.parquet", 
"**/details_original|mmlu:logical_fallacies|5_2023-08-28T21:17:20.453695.parquet", "**/details_original|mmlu:machine_learning|5_2023-08-28T21:17:20.453695.parquet", "**/details_original|mmlu:management|5_2023-08-28T21:17:20.453695.parquet", "**/details_original|mmlu:marketing|5_2023-08-28T21:17:20.453695.parquet", "**/details_original|mmlu:medical_genetics|5_2023-08-28T21:17:20.453695.parquet", "**/details_original|mmlu:miscellaneous|5_2023-08-28T21:17:20.453695.parquet", "**/details_original|mmlu:moral_disputes|5_2023-08-28T21:17:20.453695.parquet", "**/details_original|mmlu:moral_scenarios|5_2023-08-28T21:17:20.453695.parquet", "**/details_original|mmlu:nutrition|5_2023-08-28T21:17:20.453695.parquet", "**/details_original|mmlu:philosophy|5_2023-08-28T21:17:20.453695.parquet", "**/details_original|mmlu:prehistory|5_2023-08-28T21:17:20.453695.parquet", "**/details_original|mmlu:professional_accounting|5_2023-08-28T21:17:20.453695.parquet", "**/details_original|mmlu:professional_law|5_2023-08-28T21:17:20.453695.parquet", "**/details_original|mmlu:professional_medicine|5_2023-08-28T21:17:20.453695.parquet", "**/details_original|mmlu:professional_psychology|5_2023-08-28T21:17:20.453695.parquet", "**/details_original|mmlu:public_relations|5_2023-08-28T21:17:20.453695.parquet", "**/details_original|mmlu:security_studies|5_2023-08-28T21:17:20.453695.parquet", "**/details_original|mmlu:sociology|5_2023-08-28T21:17:20.453695.parquet", "**/details_original|mmlu:us_foreign_policy|5_2023-08-28T21:17:20.453695.parquet", "**/details_original|mmlu:virology|5_2023-08-28T21:17:20.453695.parquet", "**/details_original|mmlu:world_religions|5_2023-08-28T21:17:20.453695.parquet"]}, {"split": "2023_08_28T21_18_29.614335", "path": ["**/details_original|mmlu:abstract_algebra|5_2023-08-28T21:18:29.614335.parquet", "**/details_original|mmlu:anatomy|5_2023-08-28T21:18:29.614335.parquet", "**/details_original|mmlu:astronomy|5_2023-08-28T21:18:29.614335.parquet", 
"**/details_original|mmlu:business_ethics|5_2023-08-28T21:18:29.614335.parquet", "**/details_original|mmlu:clinical_knowledge|5_2023-08-28T21:18:29.614335.parquet", "**/details_original|mmlu:college_biology|5_2023-08-28T21:18:29.614335.parquet", "**/details_original|mmlu:college_chemistry|5_2023-08-28T21:18:29.614335.parquet", "**/details_original|mmlu:college_computer_science|5_2023-08-28T21:18:29.614335.parquet", "**/details_original|mmlu:college_mathematics|5_2023-08-28T21:18:29.614335.parquet", "**/details_original|mmlu:college_medicine|5_2023-08-28T21:18:29.614335.parquet", "**/details_original|mmlu:college_physics|5_2023-08-28T21:18:29.614335.parquet", "**/details_original|mmlu:computer_security|5_2023-08-28T21:18:29.614335.parquet", "**/details_original|mmlu:conceptual_physics|5_2023-08-28T21:18:29.614335.parquet", "**/details_original|mmlu:econometrics|5_2023-08-28T21:18:29.614335.parquet", "**/details_original|mmlu:electrical_engineering|5_2023-08-28T21:18:29.614335.parquet", "**/details_original|mmlu:elementary_mathematics|5_2023-08-28T21:18:29.614335.parquet", "**/details_original|mmlu:formal_logic|5_2023-08-28T21:18:29.614335.parquet", "**/details_original|mmlu:global_facts|5_2023-08-28T21:18:29.614335.parquet", "**/details_original|mmlu:high_school_biology|5_2023-08-28T21:18:29.614335.parquet", "**/details_original|mmlu:high_school_chemistry|5_2023-08-28T21:18:29.614335.parquet", "**/details_original|mmlu:high_school_computer_science|5_2023-08-28T21:18:29.614335.parquet", "**/details_original|mmlu:high_school_european_history|5_2023-08-28T21:18:29.614335.parquet", "**/details_original|mmlu:high_school_geography|5_2023-08-28T21:18:29.614335.parquet", "**/details_original|mmlu:high_school_government_and_politics|5_2023-08-28T21:18:29.614335.parquet", "**/details_original|mmlu:high_school_macroeconomics|5_2023-08-28T21:18:29.614335.parquet", "**/details_original|mmlu:high_school_mathematics|5_2023-08-28T21:18:29.614335.parquet", 
"**/details_original|mmlu:high_school_microeconomics|5_2023-08-28T21:18:29.614335.parquet", "**/details_original|mmlu:high_school_physics|5_2023-08-28T21:18:29.614335.parquet", "**/details_original|mmlu:high_school_psychology|5_2023-08-28T21:18:29.614335.parquet", "**/details_original|mmlu:high_school_statistics|5_2023-08-28T21:18:29.614335.parquet", "**/details_original|mmlu:high_school_us_history|5_2023-08-28T21:18:29.614335.parquet", "**/details_original|mmlu:high_school_world_history|5_2023-08-28T21:18:29.614335.parquet", "**/details_original|mmlu:human_aging|5_2023-08-28T21:18:29.614335.parquet", "**/details_original|mmlu:human_sexuality|5_2023-08-28T21:18:29.614335.parquet", "**/details_original|mmlu:international_law|5_2023-08-28T21:18:29.614335.parquet", "**/details_original|mmlu:jurisprudence|5_2023-08-28T21:18:29.614335.parquet", "**/details_original|mmlu:logical_fallacies|5_2023-08-28T21:18:29.614335.parquet", "**/details_original|mmlu:machine_learning|5_2023-08-28T21:18:29.614335.parquet", "**/details_original|mmlu:management|5_2023-08-28T21:18:29.614335.parquet", "**/details_original|mmlu:marketing|5_2023-08-28T21:18:29.614335.parquet", "**/details_original|mmlu:medical_genetics|5_2023-08-28T21:18:29.614335.parquet", "**/details_original|mmlu:miscellaneous|5_2023-08-28T21:18:29.614335.parquet", "**/details_original|mmlu:moral_disputes|5_2023-08-28T21:18:29.614335.parquet", "**/details_original|mmlu:moral_scenarios|5_2023-08-28T21:18:29.614335.parquet", "**/details_original|mmlu:nutrition|5_2023-08-28T21:18:29.614335.parquet", "**/details_original|mmlu:philosophy|5_2023-08-28T21:18:29.614335.parquet", "**/details_original|mmlu:prehistory|5_2023-08-28T21:18:29.614335.parquet", "**/details_original|mmlu:professional_accounting|5_2023-08-28T21:18:29.614335.parquet", "**/details_original|mmlu:professional_law|5_2023-08-28T21:18:29.614335.parquet", "**/details_original|mmlu:professional_medicine|5_2023-08-28T21:18:29.614335.parquet", 
"**/details_original|mmlu:professional_psychology|5_2023-08-28T21:18:29.614335.parquet", "**/details_original|mmlu:public_relations|5_2023-08-28T21:18:29.614335.parquet", "**/details_original|mmlu:security_studies|5_2023-08-28T21:18:29.614335.parquet", "**/details_original|mmlu:sociology|5_2023-08-28T21:18:29.614335.parquet", "**/details_original|mmlu:us_foreign_policy|5_2023-08-28T21:18:29.614335.parquet", "**/details_original|mmlu:virology|5_2023-08-28T21:18:29.614335.parquet", "**/details_original|mmlu:world_religions|5_2023-08-28T21:18:29.614335.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:abstract_algebra|5_2023-08-28T21:18:29.614335.parquet", "**/details_original|mmlu:anatomy|5_2023-08-28T21:18:29.614335.parquet", "**/details_original|mmlu:astronomy|5_2023-08-28T21:18:29.614335.parquet", "**/details_original|mmlu:business_ethics|5_2023-08-28T21:18:29.614335.parquet", "**/details_original|mmlu:clinical_knowledge|5_2023-08-28T21:18:29.614335.parquet", "**/details_original|mmlu:college_biology|5_2023-08-28T21:18:29.614335.parquet", "**/details_original|mmlu:college_chemistry|5_2023-08-28T21:18:29.614335.parquet", "**/details_original|mmlu:college_computer_science|5_2023-08-28T21:18:29.614335.parquet", "**/details_original|mmlu:college_mathematics|5_2023-08-28T21:18:29.614335.parquet", "**/details_original|mmlu:college_medicine|5_2023-08-28T21:18:29.614335.parquet", "**/details_original|mmlu:college_physics|5_2023-08-28T21:18:29.614335.parquet", "**/details_original|mmlu:computer_security|5_2023-08-28T21:18:29.614335.parquet", "**/details_original|mmlu:conceptual_physics|5_2023-08-28T21:18:29.614335.parquet", "**/details_original|mmlu:econometrics|5_2023-08-28T21:18:29.614335.parquet", "**/details_original|mmlu:electrical_engineering|5_2023-08-28T21:18:29.614335.parquet", "**/details_original|mmlu:elementary_mathematics|5_2023-08-28T21:18:29.614335.parquet", "**/details_original|mmlu:formal_logic|5_2023-08-28T21:18:29.614335.parquet", 
"**/details_original|mmlu:global_facts|5_2023-08-28T21:18:29.614335.parquet", "**/details_original|mmlu:high_school_biology|5_2023-08-28T21:18:29.614335.parquet", "**/details_original|mmlu:high_school_chemistry|5_2023-08-28T21:18:29.614335.parquet", "**/details_original|mmlu:high_school_computer_science|5_2023-08-28T21:18:29.614335.parquet", "**/details_original|mmlu:high_school_european_history|5_2023-08-28T21:18:29.614335.parquet", "**/details_original|mmlu:high_school_geography|5_2023-08-28T21:18:29.614335.parquet", "**/details_original|mmlu:high_school_government_and_politics|5_2023-08-28T21:18:29.614335.parquet", "**/details_original|mmlu:high_school_macroeconomics|5_2023-08-28T21:18:29.614335.parquet", "**/details_original|mmlu:high_school_mathematics|5_2023-08-28T21:18:29.614335.parquet", "**/details_original|mmlu:high_school_microeconomics|5_2023-08-28T21:18:29.614335.parquet", "**/details_original|mmlu:high_school_physics|5_2023-08-28T21:18:29.614335.parquet", "**/details_original|mmlu:high_school_psychology|5_2023-08-28T21:18:29.614335.parquet", "**/details_original|mmlu:high_school_statistics|5_2023-08-28T21:18:29.614335.parquet", "**/details_original|mmlu:high_school_us_history|5_2023-08-28T21:18:29.614335.parquet", "**/details_original|mmlu:high_school_world_history|5_2023-08-28T21:18:29.614335.parquet", "**/details_original|mmlu:human_aging|5_2023-08-28T21:18:29.614335.parquet", "**/details_original|mmlu:human_sexuality|5_2023-08-28T21:18:29.614335.parquet", "**/details_original|mmlu:international_law|5_2023-08-28T21:18:29.614335.parquet", "**/details_original|mmlu:jurisprudence|5_2023-08-28T21:18:29.614335.parquet", "**/details_original|mmlu:logical_fallacies|5_2023-08-28T21:18:29.614335.parquet", "**/details_original|mmlu:machine_learning|5_2023-08-28T21:18:29.614335.parquet", "**/details_original|mmlu:management|5_2023-08-28T21:18:29.614335.parquet", "**/details_original|mmlu:marketing|5_2023-08-28T21:18:29.614335.parquet", 
"**/details_original|mmlu:medical_genetics|5_2023-08-28T21:18:29.614335.parquet", "**/details_original|mmlu:miscellaneous|5_2023-08-28T21:18:29.614335.parquet", "**/details_original|mmlu:moral_disputes|5_2023-08-28T21:18:29.614335.parquet", "**/details_original|mmlu:moral_scenarios|5_2023-08-28T21:18:29.614335.parquet", "**/details_original|mmlu:nutrition|5_2023-08-28T21:18:29.614335.parquet", "**/details_original|mmlu:philosophy|5_2023-08-28T21:18:29.614335.parquet", "**/details_original|mmlu:prehistory|5_2023-08-28T21:18:29.614335.parquet", "**/details_original|mmlu:professional_accounting|5_2023-08-28T21:18:29.614335.parquet", "**/details_original|mmlu:professional_law|5_2023-08-28T21:18:29.614335.parquet", "**/details_original|mmlu:professional_medicine|5_2023-08-28T21:18:29.614335.parquet", "**/details_original|mmlu:professional_psychology|5_2023-08-28T21:18:29.614335.parquet", "**/details_original|mmlu:public_relations|5_2023-08-28T21:18:29.614335.parquet", "**/details_original|mmlu:security_studies|5_2023-08-28T21:18:29.614335.parquet", "**/details_original|mmlu:sociology|5_2023-08-28T21:18:29.614335.parquet", "**/details_original|mmlu:us_foreign_policy|5_2023-08-28T21:18:29.614335.parquet", "**/details_original|mmlu:virology|5_2023-08-28T21:18:29.614335.parquet", "**/details_original|mmlu:world_religions|5_2023-08-28T21:18:29.614335.parquet"]}]}, {"config_name": "original_mmlu_abstract_algebra_5", "data_files": [{"split": "2023_08_28T21_17_20.453695", "path": ["**/details_original|mmlu:abstract_algebra|5_2023-08-28T21:17:20.453695.parquet"]}, {"split": "2023_08_28T21_18_29.614335", "path": ["**/details_original|mmlu:abstract_algebra|5_2023-08-28T21:18:29.614335.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:abstract_algebra|5_2023-08-28T21:18:29.614335.parquet"]}]}, {"config_name": "original_mmlu_anatomy_5", "data_files": [{"split": "2023_08_28T21_17_20.453695", "path": 
["**/details_original|mmlu:anatomy|5_2023-08-28T21:17:20.453695.parquet"]}, {"split": "2023_08_28T21_18_29.614335", "path": ["**/details_original|mmlu:anatomy|5_2023-08-28T21:18:29.614335.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:anatomy|5_2023-08-28T21:18:29.614335.parquet"]}]}, {"config_name": "original_mmlu_astronomy_5", "data_files": [{"split": "2023_08_28T21_17_20.453695", "path": ["**/details_original|mmlu:astronomy|5_2023-08-28T21:17:20.453695.parquet"]}, {"split": "2023_08_28T21_18_29.614335", "path": ["**/details_original|mmlu:astronomy|5_2023-08-28T21:18:29.614335.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:astronomy|5_2023-08-28T21:18:29.614335.parquet"]}]}, {"config_name": "original_mmlu_business_ethics_5", "data_files": [{"split": "2023_08_28T21_17_20.453695", "path": ["**/details_original|mmlu:business_ethics|5_2023-08-28T21:17:20.453695.parquet"]}, {"split": "2023_08_28T21_18_29.614335", "path": ["**/details_original|mmlu:business_ethics|5_2023-08-28T21:18:29.614335.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:business_ethics|5_2023-08-28T21:18:29.614335.parquet"]}]}, {"config_name": "original_mmlu_clinical_knowledge_5", "data_files": [{"split": "2023_08_28T21_17_20.453695", "path": ["**/details_original|mmlu:clinical_knowledge|5_2023-08-28T21:17:20.453695.parquet"]}, {"split": "2023_08_28T21_18_29.614335", "path": ["**/details_original|mmlu:clinical_knowledge|5_2023-08-28T21:18:29.614335.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:clinical_knowledge|5_2023-08-28T21:18:29.614335.parquet"]}]}, {"config_name": "original_mmlu_college_biology_5", "data_files": [{"split": "2023_08_28T21_17_20.453695", "path": ["**/details_original|mmlu:college_biology|5_2023-08-28T21:17:20.453695.parquet"]}, {"split": "2023_08_28T21_18_29.614335", "path": ["**/details_original|mmlu:college_biology|5_2023-08-28T21:18:29.614335.parquet"]}, {"split": "latest", "path": 
["**/details_original|mmlu:college_biology|5_2023-08-28T21:18:29.614335.parquet"]}]}, {"config_name": "original_mmlu_college_chemistry_5", "data_files": [{"split": "2023_08_28T21_17_20.453695", "path": ["**/details_original|mmlu:college_chemistry|5_2023-08-28T21:17:20.453695.parquet"]}, {"split": "2023_08_28T21_18_29.614335", "path": ["**/details_original|mmlu:college_chemistry|5_2023-08-28T21:18:29.614335.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:college_chemistry|5_2023-08-28T21:18:29.614335.parquet"]}]}, {"config_name": "original_mmlu_college_computer_science_5", "data_files": [{"split": "2023_08_28T21_17_20.453695", "path": ["**/details_original|mmlu:college_computer_science|5_2023-08-28T21:17:20.453695.parquet"]}, {"split": "2023_08_28T21_18_29.614335", "path": ["**/details_original|mmlu:college_computer_science|5_2023-08-28T21:18:29.614335.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:college_computer_science|5_2023-08-28T21:18:29.614335.parquet"]}]}, {"config_name": "original_mmlu_college_mathematics_5", "data_files": [{"split": "2023_08_28T21_17_20.453695", "path": ["**/details_original|mmlu:college_mathematics|5_2023-08-28T21:17:20.453695.parquet"]}, {"split": "2023_08_28T21_18_29.614335", "path": ["**/details_original|mmlu:college_mathematics|5_2023-08-28T21:18:29.614335.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:college_mathematics|5_2023-08-28T21:18:29.614335.parquet"]}]}, {"config_name": "original_mmlu_college_medicine_5", "data_files": [{"split": "2023_08_28T21_17_20.453695", "path": ["**/details_original|mmlu:college_medicine|5_2023-08-28T21:17:20.453695.parquet"]}, {"split": "2023_08_28T21_18_29.614335", "path": ["**/details_original|mmlu:college_medicine|5_2023-08-28T21:18:29.614335.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:college_medicine|5_2023-08-28T21:18:29.614335.parquet"]}]}, {"config_name": "original_mmlu_college_physics_5", "data_files": 
[{"split": "2023_08_28T21_17_20.453695", "path": ["**/details_original|mmlu:college_physics|5_2023-08-28T21:17:20.453695.parquet"]}, {"split": "2023_08_28T21_18_29.614335", "path": ["**/details_original|mmlu:college_physics|5_2023-08-28T21:18:29.614335.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:college_physics|5_2023-08-28T21:18:29.614335.parquet"]}]}, {"config_name": "original_mmlu_computer_security_5", "data_files": [{"split": "2023_08_28T21_17_20.453695", "path": ["**/details_original|mmlu:computer_security|5_2023-08-28T21:17:20.453695.parquet"]}, {"split": "2023_08_28T21_18_29.614335", "path": ["**/details_original|mmlu:computer_security|5_2023-08-28T21:18:29.614335.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:computer_security|5_2023-08-28T21:18:29.614335.parquet"]}]}, {"config_name": "original_mmlu_conceptual_physics_5", "data_files": [{"split": "2023_08_28T21_17_20.453695", "path": ["**/details_original|mmlu:conceptual_physics|5_2023-08-28T21:17:20.453695.parquet"]}, {"split": "2023_08_28T21_18_29.614335", "path": ["**/details_original|mmlu:conceptual_physics|5_2023-08-28T21:18:29.614335.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:conceptual_physics|5_2023-08-28T21:18:29.614335.parquet"]}]}, {"config_name": "original_mmlu_econometrics_5", "data_files": [{"split": "2023_08_28T21_17_20.453695", "path": ["**/details_original|mmlu:econometrics|5_2023-08-28T21:17:20.453695.parquet"]}, {"split": "2023_08_28T21_18_29.614335", "path": ["**/details_original|mmlu:econometrics|5_2023-08-28T21:18:29.614335.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:econometrics|5_2023-08-28T21:18:29.614335.parquet"]}]}, {"config_name": "original_mmlu_electrical_engineering_5", "data_files": [{"split": "2023_08_28T21_17_20.453695", "path": ["**/details_original|mmlu:electrical_engineering|5_2023-08-28T21:17:20.453695.parquet"]}, {"split": "2023_08_28T21_18_29.614335", "path": 
["**/details_original|mmlu:electrical_engineering|5_2023-08-28T21:18:29.614335.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:electrical_engineering|5_2023-08-28T21:18:29.614335.parquet"]}]}, {"config_name": "original_mmlu_elementary_mathematics_5", "data_files": [{"split": "2023_08_28T21_17_20.453695", "path": ["**/details_original|mmlu:elementary_mathematics|5_2023-08-28T21:17:20.453695.parquet"]}, {"split": "2023_08_28T21_18_29.614335", "path": ["**/details_original|mmlu:elementary_mathematics|5_2023-08-28T21:18:29.614335.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:elementary_mathematics|5_2023-08-28T21:18:29.614335.parquet"]}]}, {"config_name": "original_mmlu_formal_logic_5", "data_files": [{"split": "2023_08_28T21_17_20.453695", "path": ["**/details_original|mmlu:formal_logic|5_2023-08-28T21:17:20.453695.parquet"]}, {"split": "2023_08_28T21_18_29.614335", "path": ["**/details_original|mmlu:formal_logic|5_2023-08-28T21:18:29.614335.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:formal_logic|5_2023-08-28T21:18:29.614335.parquet"]}]}, {"config_name": "original_mmlu_global_facts_5", "data_files": [{"split": "2023_08_28T21_17_20.453695", "path": ["**/details_original|mmlu:global_facts|5_2023-08-28T21:17:20.453695.parquet"]}, {"split": "2023_08_28T21_18_29.614335", "path": ["**/details_original|mmlu:global_facts|5_2023-08-28T21:18:29.614335.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:global_facts|5_2023-08-28T21:18:29.614335.parquet"]}]}, {"config_name": "original_mmlu_high_school_biology_5", "data_files": [{"split": "2023_08_28T21_17_20.453695", "path": ["**/details_original|mmlu:high_school_biology|5_2023-08-28T21:17:20.453695.parquet"]}, {"split": "2023_08_28T21_18_29.614335", "path": ["**/details_original|mmlu:high_school_biology|5_2023-08-28T21:18:29.614335.parquet"]}, {"split": "latest", "path": 
["**/details_original|mmlu:high_school_biology|5_2023-08-28T21:18:29.614335.parquet"]}]}, {"config_name": "original_mmlu_high_school_chemistry_5", "data_files": [{"split": "2023_08_28T21_17_20.453695", "path": ["**/details_original|mmlu:high_school_chemistry|5_2023-08-28T21:17:20.453695.parquet"]}, {"split": "2023_08_28T21_18_29.614335", "path": ["**/details_original|mmlu:high_school_chemistry|5_2023-08-28T21:18:29.614335.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:high_school_chemistry|5_2023-08-28T21:18:29.614335.parquet"]}]}, {"config_name": "original_mmlu_high_school_computer_science_5", "data_files": [{"split": "2023_08_28T21_17_20.453695", "path": ["**/details_original|mmlu:high_school_computer_science|5_2023-08-28T21:17:20.453695.parquet"]}, {"split": "2023_08_28T21_18_29.614335", "path": ["**/details_original|mmlu:high_school_computer_science|5_2023-08-28T21:18:29.614335.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:high_school_computer_science|5_2023-08-28T21:18:29.614335.parquet"]}]}, {"config_name": "original_mmlu_high_school_european_history_5", "data_files": [{"split": "2023_08_28T21_17_20.453695", "path": ["**/details_original|mmlu:high_school_european_history|5_2023-08-28T21:17:20.453695.parquet"]}, {"split": "2023_08_28T21_18_29.614335", "path": ["**/details_original|mmlu:high_school_european_history|5_2023-08-28T21:18:29.614335.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:high_school_european_history|5_2023-08-28T21:18:29.614335.parquet"]}]}, {"config_name": "original_mmlu_high_school_geography_5", "data_files": [{"split": "2023_08_28T21_17_20.453695", "path": ["**/details_original|mmlu:high_school_geography|5_2023-08-28T21:17:20.453695.parquet"]}, {"split": "2023_08_28T21_18_29.614335", "path": ["**/details_original|mmlu:high_school_geography|5_2023-08-28T21:18:29.614335.parquet"]}, {"split": "latest", "path": 
["**/details_original|mmlu:high_school_geography|5_2023-08-28T21:18:29.614335.parquet"]}]}, {"config_name": "original_mmlu_high_school_government_and_politics_5", "data_files": [{"split": "2023_08_28T21_17_20.453695", "path": ["**/details_original|mmlu:high_school_government_and_politics|5_2023-08-28T21:17:20.453695.parquet"]}, {"split": "2023_08_28T21_18_29.614335", "path": ["**/details_original|mmlu:high_school_government_and_politics|5_2023-08-28T21:18:29.614335.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:high_school_government_and_politics|5_2023-08-28T21:18:29.614335.parquet"]}]}, {"config_name": "original_mmlu_high_school_macroeconomics_5", "data_files": [{"split": "2023_08_28T21_17_20.453695", "path": ["**/details_original|mmlu:high_school_macroeconomics|5_2023-08-28T21:17:20.453695.parquet"]}, {"split": "2023_08_28T21_18_29.614335", "path": ["**/details_original|mmlu:high_school_macroeconomics|5_2023-08-28T21:18:29.614335.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:high_school_macroeconomics|5_2023-08-28T21:18:29.614335.parquet"]}]}, {"config_name": "original_mmlu_high_school_mathematics_5", "data_files": [{"split": "2023_08_28T21_17_20.453695", "path": ["**/details_original|mmlu:high_school_mathematics|5_2023-08-28T21:17:20.453695.parquet"]}, {"split": "2023_08_28T21_18_29.614335", "path": ["**/details_original|mmlu:high_school_mathematics|5_2023-08-28T21:18:29.614335.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:high_school_mathematics|5_2023-08-28T21:18:29.614335.parquet"]}]}, {"config_name": "original_mmlu_high_school_microeconomics_5", "data_files": [{"split": "2023_08_28T21_17_20.453695", "path": ["**/details_original|mmlu:high_school_microeconomics|5_2023-08-28T21:17:20.453695.parquet"]}, {"split": "2023_08_28T21_18_29.614335", "path": ["**/details_original|mmlu:high_school_microeconomics|5_2023-08-28T21:18:29.614335.parquet"]}, {"split": "latest", "path": 
["**/details_original|mmlu:high_school_microeconomics|5_2023-08-28T21:18:29.614335.parquet"]}]}, {"config_name": "original_mmlu_high_school_physics_5", "data_files": [{"split": "2023_08_28T21_17_20.453695", "path": ["**/details_original|mmlu:high_school_physics|5_2023-08-28T21:17:20.453695.parquet"]}, {"split": "2023_08_28T21_18_29.614335", "path": ["**/details_original|mmlu:high_school_physics|5_2023-08-28T21:18:29.614335.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:high_school_physics|5_2023-08-28T21:18:29.614335.parquet"]}]}, {"config_name": "original_mmlu_high_school_psychology_5", "data_files": [{"split": "2023_08_28T21_17_20.453695", "path": ["**/details_original|mmlu:high_school_psychology|5_2023-08-28T21:17:20.453695.parquet"]}, {"split": "2023_08_28T21_18_29.614335", "path": ["**/details_original|mmlu:high_school_psychology|5_2023-08-28T21:18:29.614335.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:high_school_psychology|5_2023-08-28T21:18:29.614335.parquet"]}]}, {"config_name": "original_mmlu_high_school_statistics_5", "data_files": [{"split": "2023_08_28T21_17_20.453695", "path": ["**/details_original|mmlu:high_school_statistics|5_2023-08-28T21:17:20.453695.parquet"]}, {"split": "2023_08_28T21_18_29.614335", "path": ["**/details_original|mmlu:high_school_statistics|5_2023-08-28T21:18:29.614335.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:high_school_statistics|5_2023-08-28T21:18:29.614335.parquet"]}]}, {"config_name": "original_mmlu_high_school_us_history_5", "data_files": [{"split": "2023_08_28T21_17_20.453695", "path": ["**/details_original|mmlu:high_school_us_history|5_2023-08-28T21:17:20.453695.parquet"]}, {"split": "2023_08_28T21_18_29.614335", "path": ["**/details_original|mmlu:high_school_us_history|5_2023-08-28T21:18:29.614335.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:high_school_us_history|5_2023-08-28T21:18:29.614335.parquet"]}]}, {"config_name": 
"original_mmlu_high_school_world_history_5", "data_files": [{"split": "2023_08_28T21_17_20.453695", "path": ["**/details_original|mmlu:high_school_world_history|5_2023-08-28T21:17:20.453695.parquet"]}, {"split": "2023_08_28T21_18_29.614335", "path": ["**/details_original|mmlu:high_school_world_history|5_2023-08-28T21:18:29.614335.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:high_school_world_history|5_2023-08-28T21:18:29.614335.parquet"]}]}, {"config_name": "original_mmlu_human_aging_5", "data_files": [{"split": "2023_08_28T21_17_20.453695", "path": ["**/details_original|mmlu:human_aging|5_2023-08-28T21:17:20.453695.parquet"]}, {"split": "2023_08_28T21_18_29.614335", "path": ["**/details_original|mmlu:human_aging|5_2023-08-28T21:18:29.614335.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:human_aging|5_2023-08-28T21:18:29.614335.parquet"]}]}, {"config_name": "original_mmlu_human_sexuality_5", "data_files": [{"split": "2023_08_28T21_17_20.453695", "path": ["**/details_original|mmlu:human_sexuality|5_2023-08-28T21:17:20.453695.parquet"]}, {"split": "2023_08_28T21_18_29.614335", "path": ["**/details_original|mmlu:human_sexuality|5_2023-08-28T21:18:29.614335.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:human_sexuality|5_2023-08-28T21:18:29.614335.parquet"]}]}, {"config_name": "original_mmlu_international_law_5", "data_files": [{"split": "2023_08_28T21_17_20.453695", "path": ["**/details_original|mmlu:international_law|5_2023-08-28T21:17:20.453695.parquet"]}, {"split": "2023_08_28T21_18_29.614335", "path": ["**/details_original|mmlu:international_law|5_2023-08-28T21:18:29.614335.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:international_law|5_2023-08-28T21:18:29.614335.parquet"]}]}, {"config_name": "original_mmlu_jurisprudence_5", "data_files": [{"split": "2023_08_28T21_17_20.453695", "path": ["**/details_original|mmlu:jurisprudence|5_2023-08-28T21:17:20.453695.parquet"]}, {"split": 
"2023_08_28T21_18_29.614335", "path": ["**/details_original|mmlu:jurisprudence|5_2023-08-28T21:18:29.614335.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:jurisprudence|5_2023-08-28T21:18:29.614335.parquet"]}]}, {"config_name": "original_mmlu_logical_fallacies_5", "data_files": [{"split": "2023_08_28T21_17_20.453695", "path": ["**/details_original|mmlu:logical_fallacies|5_2023-08-28T21:17:20.453695.parquet"]}, {"split": "2023_08_28T21_18_29.614335", "path": ["**/details_original|mmlu:logical_fallacies|5_2023-08-28T21:18:29.614335.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:logical_fallacies|5_2023-08-28T21:18:29.614335.parquet"]}]}, {"config_name": "original_mmlu_machine_learning_5", "data_files": [{"split": "2023_08_28T21_17_20.453695", "path": ["**/details_original|mmlu:machine_learning|5_2023-08-28T21:17:20.453695.parquet"]}, {"split": "2023_08_28T21_18_29.614335", "path": ["**/details_original|mmlu:machine_learning|5_2023-08-28T21:18:29.614335.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:machine_learning|5_2023-08-28T21:18:29.614335.parquet"]}]}, {"config_name": "original_mmlu_management_5", "data_files": [{"split": "2023_08_28T21_17_20.453695", "path": ["**/details_original|mmlu:management|5_2023-08-28T21:17:20.453695.parquet"]}, {"split": "2023_08_28T21_18_29.614335", "path": ["**/details_original|mmlu:management|5_2023-08-28T21:18:29.614335.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:management|5_2023-08-28T21:18:29.614335.parquet"]}]}, {"config_name": "original_mmlu_marketing_5", "data_files": [{"split": "2023_08_28T21_17_20.453695", "path": ["**/details_original|mmlu:marketing|5_2023-08-28T21:17:20.453695.parquet"]}, {"split": "2023_08_28T21_18_29.614335", "path": ["**/details_original|mmlu:marketing|5_2023-08-28T21:18:29.614335.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:marketing|5_2023-08-28T21:18:29.614335.parquet"]}]}, {"config_name": 
"original_mmlu_medical_genetics_5", "data_files": [{"split": "2023_08_28T21_17_20.453695", "path": ["**/details_original|mmlu:medical_genetics|5_2023-08-28T21:17:20.453695.parquet"]}, {"split": "2023_08_28T21_18_29.614335", "path": ["**/details_original|mmlu:medical_genetics|5_2023-08-28T21:18:29.614335.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:medical_genetics|5_2023-08-28T21:18:29.614335.parquet"]}]}, {"config_name": "original_mmlu_miscellaneous_5", "data_files": [{"split": "2023_08_28T21_17_20.453695", "path": ["**/details_original|mmlu:miscellaneous|5_2023-08-28T21:17:20.453695.parquet"]}, {"split": "2023_08_28T21_18_29.614335", "path": ["**/details_original|mmlu:miscellaneous|5_2023-08-28T21:18:29.614335.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:miscellaneous|5_2023-08-28T21:18:29.614335.parquet"]}]}, {"config_name": "original_mmlu_moral_disputes_5", "data_files": [{"split": "2023_08_28T21_17_20.453695", "path": ["**/details_original|mmlu:moral_disputes|5_2023-08-28T21:17:20.453695.parquet"]}, {"split": "2023_08_28T21_18_29.614335", "path": ["**/details_original|mmlu:moral_disputes|5_2023-08-28T21:18:29.614335.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:moral_disputes|5_2023-08-28T21:18:29.614335.parquet"]}]}, {"config_name": "original_mmlu_moral_scenarios_5", "data_files": [{"split": "2023_08_28T21_17_20.453695", "path": ["**/details_original|mmlu:moral_scenarios|5_2023-08-28T21:17:20.453695.parquet"]}, {"split": "2023_08_28T21_18_29.614335", "path": ["**/details_original|mmlu:moral_scenarios|5_2023-08-28T21:18:29.614335.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:moral_scenarios|5_2023-08-28T21:18:29.614335.parquet"]}]}, {"config_name": "original_mmlu_nutrition_5", "data_files": [{"split": "2023_08_28T21_17_20.453695", "path": ["**/details_original|mmlu:nutrition|5_2023-08-28T21:17:20.453695.parquet"]}, {"split": "2023_08_28T21_18_29.614335", "path": 
["**/details_original|mmlu:nutrition|5_2023-08-28T21:18:29.614335.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:nutrition|5_2023-08-28T21:18:29.614335.parquet"]}]}, {"config_name": "original_mmlu_philosophy_5", "data_files": [{"split": "2023_08_28T21_17_20.453695", "path": ["**/details_original|mmlu:philosophy|5_2023-08-28T21:17:20.453695.parquet"]}, {"split": "2023_08_28T21_18_29.614335", "path": ["**/details_original|mmlu:philosophy|5_2023-08-28T21:18:29.614335.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:philosophy|5_2023-08-28T21:18:29.614335.parquet"]}]}, {"config_name": "original_mmlu_prehistory_5", "data_files": [{"split": "2023_08_28T21_17_20.453695", "path": ["**/details_original|mmlu:prehistory|5_2023-08-28T21:17:20.453695.parquet"]}, {"split": "2023_08_28T21_18_29.614335", "path": ["**/details_original|mmlu:prehistory|5_2023-08-28T21:18:29.614335.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:prehistory|5_2023-08-28T21:18:29.614335.parquet"]}]}, {"config_name": "original_mmlu_professional_accounting_5", "data_files": [{"split": "2023_08_28T21_17_20.453695", "path": ["**/details_original|mmlu:professional_accounting|5_2023-08-28T21:17:20.453695.parquet"]}, {"split": "2023_08_28T21_18_29.614335", "path": ["**/details_original|mmlu:professional_accounting|5_2023-08-28T21:18:29.614335.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:professional_accounting|5_2023-08-28T21:18:29.614335.parquet"]}]}, {"config_name": "original_mmlu_professional_law_5", "data_files": [{"split": "2023_08_28T21_17_20.453695", "path": ["**/details_original|mmlu:professional_law|5_2023-08-28T21:17:20.453695.parquet"]}, {"split": "2023_08_28T21_18_29.614335", "path": ["**/details_original|mmlu:professional_law|5_2023-08-28T21:18:29.614335.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:professional_law|5_2023-08-28T21:18:29.614335.parquet"]}]}, {"config_name": 
"original_mmlu_professional_medicine_5", "data_files": [{"split": "2023_08_28T21_17_20.453695", "path": ["**/details_original|mmlu:professional_medicine|5_2023-08-28T21:17:20.453695.parquet"]}, {"split": "2023_08_28T21_18_29.614335", "path": ["**/details_original|mmlu:professional_medicine|5_2023-08-28T21:18:29.614335.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:professional_medicine|5_2023-08-28T21:18:29.614335.parquet"]}]}, {"config_name": "original_mmlu_professional_psychology_5", "data_files": [{"split": "2023_08_28T21_17_20.453695", "path": ["**/details_original|mmlu:professional_psychology|5_2023-08-28T21:17:20.453695.parquet"]}, {"split": "2023_08_28T21_18_29.614335", "path": ["**/details_original|mmlu:professional_psychology|5_2023-08-28T21:18:29.614335.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:professional_psychology|5_2023-08-28T21:18:29.614335.parquet"]}]}, {"config_name": "original_mmlu_public_relations_5", "data_files": [{"split": "2023_08_28T21_17_20.453695", "path": ["**/details_original|mmlu:public_relations|5_2023-08-28T21:17:20.453695.parquet"]}, {"split": "2023_08_28T21_18_29.614335", "path": ["**/details_original|mmlu:public_relations|5_2023-08-28T21:18:29.614335.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:public_relations|5_2023-08-28T21:18:29.614335.parquet"]}]}, {"config_name": "original_mmlu_security_studies_5", "data_files": [{"split": "2023_08_28T21_17_20.453695", "path": ["**/details_original|mmlu:security_studies|5_2023-08-28T21:17:20.453695.parquet"]}, {"split": "2023_08_28T21_18_29.614335", "path": ["**/details_original|mmlu:security_studies|5_2023-08-28T21:18:29.614335.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:security_studies|5_2023-08-28T21:18:29.614335.parquet"]}]}, {"config_name": "original_mmlu_sociology_5", "data_files": [{"split": "2023_08_28T21_17_20.453695", "path": 
["**/details_original|mmlu:sociology|5_2023-08-28T21:17:20.453695.parquet"]}, {"split": "2023_08_28T21_18_29.614335", "path": ["**/details_original|mmlu:sociology|5_2023-08-28T21:18:29.614335.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:sociology|5_2023-08-28T21:18:29.614335.parquet"]}]}, {"config_name": "original_mmlu_us_foreign_policy_5", "data_files": [{"split": "2023_08_28T21_17_20.453695", "path": ["**/details_original|mmlu:us_foreign_policy|5_2023-08-28T21:17:20.453695.parquet"]}, {"split": "2023_08_28T21_18_29.614335", "path": ["**/details_original|mmlu:us_foreign_policy|5_2023-08-28T21:18:29.614335.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:us_foreign_policy|5_2023-08-28T21:18:29.614335.parquet"]}]}, {"config_name": "original_mmlu_virology_5", "data_files": [{"split": "2023_08_28T21_17_20.453695", "path": ["**/details_original|mmlu:virology|5_2023-08-28T21:17:20.453695.parquet"]}, {"split": "2023_08_28T21_18_29.614335", "path": ["**/details_original|mmlu:virology|5_2023-08-28T21:18:29.614335.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:virology|5_2023-08-28T21:18:29.614335.parquet"]}]}, {"config_name": "original_mmlu_world_religions_5", "data_files": [{"split": "2023_08_28T21_17_20.453695", "path": ["**/details_original|mmlu:world_religions|5_2023-08-28T21:17:20.453695.parquet"]}, {"split": "2023_08_28T21_18_29.614335", "path": ["**/details_original|mmlu:world_religions|5_2023-08-28T21:18:29.614335.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:world_religions|5_2023-08-28T21:18:29.614335.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_08_28T09_53_59.312863", "path": ["results_2023-08-28T09:53:59.312863.parquet"]}, {"split": "2023_08_28T21_17_20.453695", "path": ["results_2023-08-28T21:17:20.453695.parquet"]}, {"split": "2023_08_28T21_18_29.614335", "path": ["results_2023-08-28T21:18:29.614335.parquet"]}, {"split": "2024_02_14T22_50_56.838467", 
"path": ["results_2024-02-14T22-50-56.838467.parquet"]}, {"split": "latest", "path": ["results_2024-02-14T22-50-56.838467.parquet"]}]}]}
2024-02-14T22:53:49+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of bigcode/starcoder Dataset automatically created during the evaluation run of model bigcode/starcoder on the Open LLM Leaderboard. The dataset is composed of 121 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 4 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-02-14T22:50:56.838467(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
[ "# Dataset Card for Evaluation run of bigcode/starcoder\n\n\n\nDataset automatically created during the evaluation run of model bigcode/starcoder on the Open LLM Leaderboard.\n\nThe dataset is composed of 121 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 4 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-02-14T22:50:56.838467(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. 
More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of bigcode/starcoder\n\n\n\nDataset automatically created during the evaluation run of model bigcode/starcoder on the Open LLM Leaderboard.\n\nThe dataset is composed of 121 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 4 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-02-14T22:50:56.838467(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. 
More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 173, 67, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of bigcode/starcoder\n\n\n\nDataset automatically created during the evaluation run of model bigcode/starcoder on the Open LLM Leaderboard.\n\nThe dataset is composed of 121 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 4 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2024-02-14T22:50:56.838467(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]## Dataset Card Authors [optional]## Dataset Card Contact" ]
8771b84b5497ea402b4d576c08f031d738ee8793
# Dataset Card for Dataset Name ## Dataset Description - **Homepage:** - **Repository:** - **Paper:** - **Leaderboard:** - **Point of Contact:** ### Dataset Summary This dataset card aims to be a base template for new datasets. It has been generated using [this raw template](https://github.com/huggingface/huggingface_hub/blob/main/src/huggingface_hub/templates/datasetcard_template.md?plain=1). ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
hiepnt/celeba
[ "region:us" ]
2023-08-28T08:55:39+00:00
{}
2023-08-28T08:59:07+00:00
[]
[]
TAGS #region-us
# Dataset Card for Dataset Name ## Dataset Description - Homepage: - Repository: - Paper: - Leaderboard: - Point of Contact: ### Dataset Summary This dataset card aims to be a base template for new datasets. It has been generated using this raw template. ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
[ "# Dataset Card for Dataset Name", "## Dataset Description\n\n- Homepage: \n- Repository: \n- Paper: \n- Leaderboard: \n- Point of Contact:", "### Dataset Summary\n\nThis dataset card aims to be a base template for new datasets. It has been generated using this raw template.", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Dataset Name", "## Dataset Description\n\n- Homepage: \n- Repository: \n- Paper: \n- Leaderboard: \n- Point of Contact:", "### Dataset Summary\n\nThis dataset card aims to be a base template for new datasets. It has been generated using this raw template.", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 8, 24, 32, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Dataset Name## Dataset Description\n\n- Homepage: \n- Repository: \n- Paper: \n- Leaderboard: \n- Point of Contact:### Dataset Summary\n\nThis dataset card aims to be a base template for new datasets. It has been generated using this raw template.### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
97b33bce10023c68ba3254fc127b75113f7ce3c8
**Produktname** - [KetoXBoom](https://ketoxboom-deutschland-1.jimdosite.com/) **Vorteile** - Fett und Gewicht zu verlieren, erhöhen Energie **Nebenwirkungen** - Keine größeren Nebenwirkungen, (100% natürlich) **Verfügbarkeit** - [Online](https://www.healthsupplement24x7.com/get-ketoxboom) **Offizielle Website** - [https://www.healthsupplement24x7.com/get-ketoxboom](https://www.healthsupplement24x7.com/get-ketoxboom) Die Menschen versuchen verschiedene Strategien, um überschüssiges Körpergewicht zu verlieren, aber sie scheitern, um ihre Ziele zu erreichen. Das ist der Grund, warum [KetoXBoom](https://healthsupplements24x7.blogspot.com/2023/08/ketoxboom.html) Deutschland geschaffen wurde; es hilft einfach bei der Entfernung von überschüssigem Gewicht aus dem Körper, verbessert die Verdauung und erhöht die Ausdauer und das Energieniveau. [![](https://blogger.googleusercontent.com/img/b/R29vZ2xl/AVvXsEi1N_ypqSBgYLR10Z37BrU8tiEB9glypD3cfdHrt8TrYg_sAEXaI-0T3dXF4JHmplzdk1-adA6MtrSvyJiNwyR05xKtAbr_7-v5dSFl2aMThP1TnjWU85I2bSBitXGvdPJL96FzwraL5UmXm16BH-tD4xB0y2rGn3Occ3PKCqLlCa3nhWsRwwVTAWEFtcGV/w640-h360/wallpaperflare.com_wallpaper(6).jpg)](https://www.healthsupplement24x7.com/get-ketoxboom) ### **[Erhalten Sie verfügbaren Rabatt Preis auf der offiziellen Website](https://www.healthsupplement24x7.com/get-ketoxboom)** **Was sind KetoXBoom?** ----------------------- [KetoXBoom](https://www.sympla.com.br/evento/ketoxboom/2137844) ist ein innovatives Nahrungsergänzungsmittel, das zur Unterstützung einer schnellen Gewichtsabnahme entwickelt wurde. Diese Gummibärchen sind mit der Güte von Keto und Apfelessig versetzt, zwei kraftvollen Zutaten, die für ihre Fähigkeit bekannt sind, die Gewichtsabnahme zu fördern und die allgemeine Gesundheit zu verbessern. Die einzigartige Mischung der Inhaltsstoffe in Keto Gummies macht sie zu einer bequemen und köstlichen Möglichkeit, Ihren Weg zur Gewichtsabnahme zu unterstützen. 
**Wie funktioniert KetoXBoom?** ------------------------------- [KetoXBoom](https://radiohead.clubeo.com) kombiniert die Kraft von Keto und Apfelessig in einer praktischen Gummibärchenform. Diese Gummibärchen sind vollgepackt mit essentiellen Nähr- und Wirkstoffen, die Ihre Ziele bei der Gewichtsabnahme unterstützen. Durch die regelmäßige Einnahme von KetoXBoom können Sie Ihren Körper in Ketose versetzen und die Fettverbrennung beschleunigen. Die Gummies helfen außerdem, den Heißhunger zu zügeln, den Stoffwechsel anzukurbeln und die Verdauung zu verbessern, was das Abnehmen einfacher und angenehmer macht. **Vorteile von KetoXBoom** -------------------------- Wir stellen Ihnen [KetoXBoom](https://pdfhost.io/v/GsYdBzlYx_KetoXBoom_Deutschland_sterreich_Schweiz_Um_noch_schneller_und_effektiver_Gewicht_zu_verlieren) vor, ein innovatives Nahrungsergänzungsmittel zur Gewichtsreduktion, das BHB (Beta-Hydroxybutyrat) mit Apfelessig kombiniert, um Ihnen zu helfen, mehr Fett zu verbrennen und Ihre allgemeine Gesundheit zu verbessern. \- Die Gummies werden aus den besten Qualitätskomponenten hergestellt und sind professionell entwickelt worden, um synergetisch zu arbeiten, um Ihren Stoffwechsel anzukurbeln und bei der Gewichtsabnahme zu helfen.- Die Gummies enthalten außerdem eine spezielle Kombination aus notwendigen Vitaminen und Mineralien, die den ganzen Tag über ein anhaltendes Energieniveau aufrechterhalten, so dass die Verbraucher aktiv bleiben können, ohne Müdigkeit oder Schläfrigkeit zu verspüren. \- Dies verringert den Heißhunger auf Kohlenhydrate oder Süßigkeiten und fördert gleichzeitig die Aufrechterhaltung einer positiven Stimmung während des Tages. 
\- Neben der schnellen und effizienten Gewichtsabnahme bietet KetoXBoom zahlreiche weitere Vorteile, darunter eine verbesserte Konzentration und Aufmerksamkeit, mehr Energie, eine bessere Verdauung, einen besseren Schlaf, eine bessere Aufnahme von Nährstoffen aus Nahrungsmitteln und weniger durch oxidativen Stress bedingte Entzündungen im Körper. \- Ein erhöhter Kalorienverbrauch bei sportlichen Aktivitäten aufgrund einer schnelleren Stoffwechselrate sowie eine verbesserte geistige Klarheit können das allgemeine Gesundheitsprofil erheblich verbessern. \- Die Gummies kombinieren BHB (Beta-Hydroxybutyrat), das als Ketosalz bekannt ist und die Fettverbrennung steigert und die Gewichtsreduktion beschleunigt, mit Apfelessig, einem natürlichen Gesundheitsförderer. Diese Kombination von Nährstoffen, Vitaminen, Mineralien und Kräuterextrakten bietet einen schnellen und sicheren Weg, um in die Ketose zu gelangen. Um in den Genuss der Vorteile von KetoXBoom zu kommen, klicken Sie hier, um Ihren Vorrat jetzt zu bestellen! [![](https://blogger.googleusercontent.com/img/b/R29vZ2xl/AVvXsEgLyeWZa-PwMKzJhRxuG5ACTx8MnIulZ3tTQ-dTyapy3ryT83fxwPeMDOmtQY2XM_7ndtlMXcwoz6eLKSEJcIKn0hsG-aBq8YrjS7DKcEvC_0z8_YSThRQntntEslWMYN1shOmd6mJSxqRttUmmMa8DQEFpC3X4OHNS7U4kurvm0Gh8kc3KXHvqaKo8Xpx_/w640-h360/wallpaperflare.com_wallpaper(2).jpg)](https://www.healthsupplement24x7.com/get-ketoxboom) ### **[Holen Sie sich KetoXBoom jetzt, solange es im Angebot ist - nur für begrenzte Zeit!](https://www.healthsupplement24x7.com/get-ketoxboom)** **KetoXBoom Inhaltsstoffe und Zusammensetzung** ----------------------------------------------- Nachdem das Produkt in unserem Test so erfolgreich war, wollten wir die genauen Inhaltsstoffe von KetoXBoom wissen. Es sind zwar nur 3 Inhaltsstoffe in den Kapseln, aber diese sind so genau aufeinander abgestimmt, dass im Grunde nur ein Erfolg eintreten kann. 
Die Kapseln enthalten die folgenden Inhaltsstoffe: **Löwenzahn** - Er ist reich an Antioxidantien, die bei der Entgiftung des Körpers helfen und die Hungerhormone stimulieren. Er hilft auch bei der Verbesserung der Verdauung, der Immunität und der Regulierung des Blutzuckerspiegels. Löwenzahntee wird auch zum effektiven und gesunden Fettabbau eingesetzt. **Grüner Tee** - Grüner Tee ist ein berühmtes Getränk, das potenziell dazu beiträgt, ein normales Körpergewicht zu halten und Fettleibigkeit auf signifikante Weise zu bekämpfen. Er bietet seinen Trinkern viele Vorteile und trägt zu einer verbesserten Hautgesundheit bei, fördert die geistige Konzentration und reduziert die Fettansammlung im Körper. **Curryblätter** - Es ist ein natürliches Kraut, das in vielen Gerichten verwendet wird und medizinische Eigenschaften hat. Die Extrakte aus Curryblättern haben das Potenzial, ungesunde Fettmoleküle anzugreifen und sie auf schnelle Weise zu schmelzen. Sie sind für eine schnelle Fettverbrennung bekannt. **L-Arginin**: Diese Aminosäure ist für den Muskelaufbau verantwortlich und fördert gleichzeitig die Muskelleistung. Athleten und Menschen, die schnell ermüden, profitieren von diesem Inhaltsstoff als optimale Unterstützung. **Garcinia cambogia extract**: Diese Pflanze stammt aus Südasien und kurbelt den Fettstoffwechsel an. Bei regelmäßiger Einnahme wird die Aufnahme von Fett gehemmt oder ganz blockiert. Der Extrakt dieser Pflanze gilt seit Jahren als gesundheitsfördernd und verlangsamt den Alterungsprozess. ### **[Sonderangebot: Klicken Sie hier, um ein exklusives Rabattangebot für KetoXBoom zu erhalten](https://www.healthsupplement24x7.com/get-ketoxboom)** **Wie verwende ich KetoXBoom zur Gewichtsabnahme?** --------------------------------------------------- Die Verwendung von KetoXBoom ist der Einnahme von Multivitaminen sehr ähnlich. Sie müssen eine Kapsel mit einem Glas Wasser einnehmen und sie sofort schlucken. 
Die Tagesdosis für dieses Produkt beträgt zwei Kapseln, jeweils eine Kapsel mit zwei großen Mahlzeiten des Tages. Am besten ist es, es mindestens eine halbe Stunde vor dem Essen einzunehmen. Auf diese Weise wird es aktiviert, bevor Sie die täglichen Kalorien zu sich nehmen, und die Diätkontrolle wird einfacher. **Besteht ein Risiko?** ----------------------- Die Einnahme von Diaetoxi-Kapseln ist risikofrei und sicher in der Anwendung. Da dies eine ehrliche Bewertung von KetoXBoom Tabletten ist, werden wir Ihnen alles darüber erzählen. [KetoXBoom](https://colab.research.google.com/drive/1xV1nj8B7Et8jSqKk3NqiAW03_XbDRwTj) wird weltweit verwendet und ist sehr beliebt bei Menschen, die effektiv abnehmen wollen. Bislang wurden von den Verbrauchern keine Risiken gemeldet. Aufgrund der natürlichen Inhaltsstoffe, die in KetoXBoom verwendet werden, steigt die Zahl der Konsumenten täglich stark an. Wie Sie wissen, durchläuft Ihr Körper während der Einnahme dieser Pillen den Prozess der Ketose, so dass Sie sich keine Sorgen um die Ergebnisse machen müssen. Die rein natürlichen Inhaltsstoffe haben auch das Vertrauen der Anwender gewonnen. **Was ist der Preis von KetoXBoom?** ------------------------------------ [KetoXBoom](https://www.ivoox.com/ketoxboom-deutschland-osterreich-schweiz-um-noch-schneller-und-audios-mp3_rf_114971212_1.html) ist ein leistungsfähiges Nahrungsergänzungsmittel, das nur auf seiner offiziellen Website erhältlich ist.Obwohl das Präparat $199 kostet, bieten die Hersteller einen zeitlich begrenzten Rabatt an, um die Benutzer zu unterstützen.Dies ist, wie die Ergänzung ist preislich. Sparpaket - 59,90 € + 4,90 € Versandkosten Best-Buy-Paket - 29,90 € Preis pro Packung Vorteilspaket - 36,60 € Preis pro Packung Diese Angebote gelten nicht mehr lange, da es schwierig ist, die Nährstoffe aus dem Himalaya zu beschaffen.Daher können Sie diese Ergänzung zu diesen Preisen nur bestellen, solange der Vorrat reicht. 
[![](https://blogger.googleusercontent.com/img/b/R29vZ2xl/AVvXsEgt6Pb9rf5VDpWlCc2P-idYImkvOCEat2mlH5XSPS5CetKKgiiFKguiXGPgwEfNpgXLAoiVzCRIhRvpSAL2qY7enxfAKgbhg_VsAcv-SmFCQv4y5zcNYHk__2PCfEhS_G--YyP45U8aXsJ2Mukb0NeHLvp6lzv3W8yul2uu_Giu96SoOdn9YSQlzW_uDDhK/w630-h640/Screenshot%20(3116).png)](https://www.healthsupplement24x7.com/get-ketoxboom) ### **["KetoXBoom" (Klicken Sie hier für das beste verfügbare Angebot)](https://www.healthsupplement24x7.com/get-ketoxboom)** **Wo kann ich KetoXBoom zum besten Preis online kaufen?** --------------------------------------------------------- [KetoXBoom](https://radiohead.clubeo.com/calendar/2023/08/28/ketoxboom-deutschland-osterreich-schweiz-um-noch-schneller-und-effektiver-gewicht-zu-verlieren) ist ausschließlich auf der offiziellen Website erhältlich.Aufgrund der gestiegenen Nachfrage wird berichtet, dass Websites von Drittanbietern versuchen, eine Imitation des Originalprodukts anzubieten.Es ist am besten, KetoXBoom nur auf der offiziellen Website zu kaufen, um zu verhindern, dass Sie von diesen nicht autorisierten Verkäufern betrogen werden. Der offizielle Website-Link zur Bestellung **Abschließende Worte** ----------------------- Abschließend lässt sich sagen, dass [KetoXBoom](https://www.scoop.it/topic/ketoxboom-review) ein einzigartiges und effektives Nahrungsergänzungsmittel zur Gewichtsabnahme ist, das sich von der Konkurrenz abhebt, indem es sich auf die Regulierung der inneren Körpertemperatur konzentriert und einen umfassenden Ansatz zur Gewichtsabnahme bietet. Mit seinen natürlichen, aus den Alpen stammenden Inhaltsstoffen, Tausenden von zufriedenen Kunden und einer 60-Tage-Geld-zurück-Garantie ist KetoXBoom eine Überlegung wert, wenn Sie auf der Suche nach einer sicheren und effektiven Lösung sind, um Ihre Gewichtsabnahme zu unterstützen. 
[![](https://blogger.googleusercontent.com/img/b/R29vZ2xl/AVvXsEiLeaPcYcQVbljcpgHMJRzcCDLmRlB7RQzzIYLDF5YVVZonsAZIy4m6e_Uy-Q63B0tA8ufCQu4U4JXyOBbAiwRhMps3qemrdTkc3aLaNU9kpuk0BWI8VgaYoq2CDq9uUhGQkcbL1QtDXbAKv2Gy7jcZiATxLPRZwa1fOik2rPWWtLxsTbf0hY3r-mmEjccT/w640-h360/wallpaperflare.com_wallpaper(8).jpg)](https://www.healthsupplement24x7.com/get-ketoxboom) ### **[Holen Sie sich KetoXBoom jetzt, solange es im Angebot ist - nur für begrenzte Zeit!](https://www.healthsupplement24x7.com/get-ketoxboom)** [https://healthsupplements24x7.blogspot.com/2023/08/ketoxboom.html](https://healthsupplements24x7.blogspot.com/2023/08/ketoxboom.html) [https://www.scoop.it/topic/ketoxboom-review](https://www.scoop.it/topic/ketoxboom-review) [https://www.sympla.com.br/evento/ketoxboom/2137844](https://www.sympla.com.br/evento/ketoxboom/2137844) [https://radiohead.clubeo.com](https://radiohead.clubeo.com) [https://radiohead.clubeo.com/calendar/2023/08/28/ketoxboom-deutschland-osterreich-schweiz-um-noch-schneller-und-effektiver-gewicht-zu-verlieren](https://radiohead.clubeo.com/calendar/2023/08/28/ketoxboom-deutschland-osterreich-schweiz-um-noch-schneller-und-effektiver-gewicht-zu-verlieren) [https://radiohead.clubeo.com/page/ketoxboom-deutschland-osterreich-schweiz-um-noch-schneller-und-effektiver-gewicht-zu-verlieren.html](https://radiohead.clubeo.com/page/ketoxboom-deutschland-osterreich-schweiz-um-noch-schneller-und-effektiver-gewicht-zu-verlieren.html) [https://radiohead.clubeo.com/page/ketoxboom-bewertungen-deutschland-osterreich-schweiz-um-noch-schneller-und-effektiver-gewicht-zu-verlieren.html](https://radiohead.clubeo.com/page/ketoxboom-bewertungen-deutschland-osterreich-schweiz-um-noch-schneller-und-effektiver-gewicht-zu-verlieren.html) 
[https://soundcloud.com/ketoxboom-554330821/ketoxboom-deutschland-osterreich-schweiz-um-noch-schneller-und-effektiver-gewicht-zu-verlieren](https://soundcloud.com/ketoxboom-554330821/ketoxboom-deutschland-osterreich-schweiz-um-noch-schneller-und-effektiver-gewicht-zu-verlieren) [https://ketoxboom-deutschland-1.jimdosite.com/](https://ketoxboom-deutschland-1.jimdosite.com/) [https://www.ivoox.com/ketoxboom-deutschland-osterreich-schweiz-um-noch-schneller-und-audios-mp3\_rf\_114971212\_1.html](https://www.ivoox.com/ketoxboom-deutschland-osterreich-schweiz-um-noch-schneller-und-audios-mp3_rf_114971212_1.html) [https://colab.research.google.com/drive/1xV1nj8B7Et8jSqKk3NqiAW03\_XbDRwTj](https://colab.research.google.com/drive/1xV1nj8B7Et8jSqKk3NqiAW03_XbDRwTj) [https://colab.research.google.com/drive/1R-o0-fx-Xdt9Rf4Jsyi9v9ZLwKsnrMO9](https://colab.research.google.com/drive/1R-o0-fx-Xdt9Rf4Jsyi9v9ZLwKsnrMO9) [https://colab.research.google.com/drive/19G6z3d0agJrZkZeVMIFaqJj4UUE3WqIj](https://colab.research.google.com/drive/19G6z3d0agJrZkZeVMIFaqJj4UUE3WqIj) [https://colab.research.google.com/drive/1MksPvrZ0nujFitChamHH6ds3OtEwc0oV](https://colab.research.google.com/drive/1MksPvrZ0nujFitChamHH6ds3OtEwc0oV) [https://colab.research.google.com/drive/10zWVTNmZZxFgFHy80iDImQ7Xe1H3tO2R](https://colab.research.google.com/drive/10zWVTNmZZxFgFHy80iDImQ7Xe1H3tO2R) [https://pdfhost.io/v/GsYdBzlYx\_KetoXBoom\_Deutschland\_sterreich\_Schweiz\_Um\_noch\_schneller\_und\_effektiver\_Gewicht\_zu\_verlieren](https://pdfhost.io/v/GsYdBzlYx_KetoXBoom_Deutschland_sterreich_Schweiz_Um_noch_schneller_und_effektiver_Gewicht_zu_verlieren) [https://form.jotform.com/ketoxboom/ketoxboom-deutschland](https://form.jotform.com/ketoxboom/ketoxboom-deutschland) [https://devfolio.co/@ketoxboom\_de](https://devfolio.co/@ketoxboom_de) [https://devfolio.co/projects/ketoxboom-b5d2](https://devfolio.co/projects/ketoxboom-b5d2)
KetoXBoom-Bewertungen/KetoXBoom
[ "region:us" ]
2023-08-28T09:01:16+00:00
{}
2023-08-28T09:01:41+00:00
[]
[]
TAGS #region-us
Produktname - KetoXBoom Vorteile - Fett und Gewicht zu verlieren, erhöhen Energie Nebenwirkungen - Keine größeren Nebenwirkungen, (100% natürlich) Verfügbarkeit - Online Offizielle Website - URL Die Menschen versuchen verschiedene Strategien, um überschüssiges Körpergewicht zu verlieren, aber sie scheitern, um ihre Ziele zu erreichen. Das ist der Grund, warum KetoXBoom Deutschland geschaffen wurde; es hilft einfach bei der Entfernung von überschüssigem Gewicht aus dem Körper, verbessert die Verdauung und erhöht die Ausdauer und das Energieniveau. ![.jpg)](URL ### Erhalten Sie verfügbaren Rabatt Preis auf der offiziellen Website Was sind KetoXBoom? ----------------------- KetoXBoom ist ein innovatives Nahrungsergänzungsmittel, das zur Unterstützung einer schnellen Gewichtsabnahme entwickelt wurde. Diese Gummibärchen sind mit der Güte von Keto und Apfelessig versetzt, zwei kraftvollen Zutaten, die für ihre Fähigkeit bekannt sind, die Gewichtsabnahme zu fördern und die allgemeine Gesundheit zu verbessern. Die einzigartige Mischung der Inhaltsstoffe in Keto Gummies macht sie zu einer bequemen und köstlichen Möglichkeit, Ihren Weg zur Gewichtsabnahme zu unterstützen. Wie funktioniert KetoXBoom? ------------------------------- KetoXBoom kombiniert die Kraft von Keto und Apfelessig in einer praktischen Gummibärchenform. Diese Gummibärchen sind vollgepackt mit essentiellen Nähr- und Wirkstoffen, die Ihre Ziele bei der Gewichtsabnahme unterstützen. Durch die regelmäßige Einnahme von KetoXBoom können Sie Ihren Körper in Ketose versetzen und die Fettverbrennung beschleunigen. Die Gummies helfen außerdem, den Heißhunger zu zügeln, den Stoffwechsel anzukurbeln und die Verdauung zu verbessern, was das Abnehmen einfacher und angenehmer macht. 
Vorteile von KetoXBoom -------------------------- Wir stellen Ihnen KetoXBoom vor, ein innovatives Nahrungsergänzungsmittel zur Gewichtsreduktion, das BHB (Beta-Hydroxybutyrat) mit Apfelessig kombiniert, um Ihnen zu helfen, mehr Fett zu verbrennen und Ihre allgemeine Gesundheit zu verbessern. \- Die Gummies werden aus den besten Qualitätskomponenten hergestellt und sind professionell entwickelt worden, um synergetisch zu arbeiten, um Ihren Stoffwechsel anzukurbeln und bei der Gewichtsabnahme zu helfen.- Die Gummies enthalten außerdem eine spezielle Kombination aus notwendigen Vitaminen und Mineralien, die den ganzen Tag über ein anhaltendes Energieniveau aufrechterhalten, so dass die Verbraucher aktiv bleiben können, ohne Müdigkeit oder Schläfrigkeit zu verspüren. \- Dies verringert den Heißhunger auf Kohlenhydrate oder Süßigkeiten und fördert gleichzeitig die Aufrechterhaltung einer positiven Stimmung während des Tages. \- Neben der schnellen und effizienten Gewichtsabnahme bietet KetoXBoom zahlreiche weitere Vorteile, darunter eine verbesserte Konzentration und Aufmerksamkeit, mehr Energie, eine bessere Verdauung, einen besseren Schlaf, eine bessere Aufnahme von Nährstoffen aus Nahrungsmitteln und weniger durch oxidativen Stress bedingte Entzündungen im Körper. \- Ein erhöhter Kalorienverbrauch bei sportlichen Aktivitäten aufgrund einer schnelleren Stoffwechselrate sowie eine verbesserte geistige Klarheit können das allgemeine Gesundheitsprofil erheblich verbessern. \- Die Gummies kombinieren BHB (Beta-Hydroxybutyrat), das als Ketosalz bekannt ist und die Fettverbrennung steigert und die Gewichtsreduktion beschleunigt, mit Apfelessig, einem natürlichen Gesundheitsförderer. Diese Kombination von Nährstoffen, Vitaminen, Mineralien und Kräuterextrakten bietet einen schnellen und sicheren Weg, um in die Ketose zu gelangen. Um in den Genuss der Vorteile von KetoXBoom zu kommen, klicken Sie hier, um Ihren Vorrat jetzt zu bestellen! 
![.jpg)](URL ### Holen Sie sich KetoXBoom jetzt, solange es im Angebot ist - nur für begrenzte Zeit! KetoXBoom Inhaltsstoffe und Zusammensetzung ----------------------------------------------- Nachdem das Produkt in unserem Test so erfolgreich war, wollten wir die genauen Inhaltsstoffe von KetoXBoom wissen. Es sind zwar nur 3 Inhaltsstoffe in den Kapseln, aber diese sind so genau aufeinander abgestimmt, dass im Grunde nur ein Erfolg eintreten kann. Die Kapseln enthalten die folgenden Inhaltsstoffe: Löwenzahn - Er ist reich an Antioxidantien, die bei der Entgiftung des Körpers helfen und die Hungerhormone stimulieren. Er hilft auch bei der Verbesserung der Verdauung, der Immunität und der Regulierung des Blutzuckerspiegels. Löwenzahntee wird auch zum effektiven und gesunden Fettabbau eingesetzt. Grüner Tee - Grüner Tee ist ein berühmtes Getränk, das potenziell dazu beiträgt, ein normales Körpergewicht zu halten und Fettleibigkeit auf signifikante Weise zu bekämpfen. Er bietet seinen Trinkern viele Vorteile und trägt zu einer verbesserten Hautgesundheit bei, fördert die geistige Konzentration und reduziert die Fettansammlung im Körper. Curryblätter - Es ist ein natürliches Kraut, das in vielen Gerichten verwendet wird und medizinische Eigenschaften hat. Die Extrakte aus Curryblättern haben das Potenzial, ungesunde Fettmoleküle anzugreifen und sie auf schnelle Weise zu schmelzen. Sie sind für eine schnelle Fettverbrennung bekannt. L-Arginin: Diese Aminosäure ist für den Muskelaufbau verantwortlich und fördert gleichzeitig die Muskelleistung. Athleten und Menschen, die schnell ermüden, profitieren von diesem Inhaltsstoff als optimale Unterstützung. Garcinia cambogia extract: Diese Pflanze stammt aus Südasien und kurbelt den Fettstoffwechsel an. Bei regelmäßiger Einnahme wird die Aufnahme von Fett gehemmt oder ganz blockiert. Der Extrakt dieser Pflanze gilt seit Jahren als gesundheitsfördernd und verlangsamt den Alterungsprozess. 
### Sonderangebot: Klicken Sie hier, um ein exklusives Rabattangebot für KetoXBoom zu erhalten Wie verwende ich KetoXBoom zur Gewichtsabnahme? --------------------------------------------------- Die Verwendung von KetoXBoom ist der Einnahme von Multivitaminen sehr ähnlich. Sie müssen eine Kapsel mit einem Glas Wasser einnehmen und sie sofort schlucken. Die Tagesdosis für dieses Produkt beträgt zwei Kapseln, jeweils eine Kapsel mit zwei großen Mahlzeiten des Tages. Am besten ist es, es mindestens eine halbe Stunde vor dem Essen einzunehmen. Auf diese Weise wird es aktiviert, bevor Sie die täglichen Kalorien zu sich nehmen, und die Diätkontrolle wird einfacher. Besteht ein Risiko? ----------------------- Die Einnahme von Diaetoxi-Kapseln ist risikofrei und sicher in der Anwendung. Da dies eine ehrliche Bewertung von KetoXBoom Tabletten ist, werden wir Ihnen alles darüber erzählen. KetoXBoom wird weltweit verwendet und ist sehr beliebt bei Menschen, die effektiv abnehmen wollen. Bislang wurden von den Verbrauchern keine Risiken gemeldet. Aufgrund der natürlichen Inhaltsstoffe, die in KetoXBoom verwendet werden, steigt die Zahl der Konsumenten täglich stark an. Wie Sie wissen, durchläuft Ihr Körper während der Einnahme dieser Pillen den Prozess der Ketose, so dass Sie sich keine Sorgen um die Ergebnisse machen müssen. Die rein natürlichen Inhaltsstoffe haben auch das Vertrauen der Anwender gewonnen. Was ist der Preis von KetoXBoom? ------------------------------------ KetoXBoom ist ein leistungsfähiges Nahrungsergänzungsmittel, das nur auf seiner offiziellen Website erhältlich ist.Obwohl das Präparat $199 kostet, bieten die Hersteller einen zeitlich begrenzten Rabatt an, um die Benutzer zu unterstützen.Dies ist, wie die Ergänzung ist preislich. 
Sparpaket - 59,90 € + 4,90 € Versandkosten Best-Buy-Paket - 29,90 € Preis pro Packung Vorteilspaket - 36,60 € Preis pro Packung Diese Angebote gelten nicht mehr lange, da es schwierig ist, die Nährstoffe aus dem Himalaya zu beschaffen.Daher können Sie diese Ergänzung zu diesen Preisen nur bestellen, solange der Vorrat reicht. ![.png)](URL ### "KetoXBoom" (Klicken Sie hier für das beste verfügbare Angebot) Wo kann ich KetoXBoom zum besten Preis online kaufen? --------------------------------------------------------- KetoXBoom ist ausschließlich auf der offiziellen Website erhältlich.Aufgrund der gestiegenen Nachfrage wird berichtet, dass Websites von Drittanbietern versuchen, eine Imitation des Originalprodukts anzubieten.Es ist am besten, KetoXBoom nur auf der offiziellen Website zu kaufen, um zu verhindern, dass Sie von diesen nicht autorisierten Verkäufern betrogen werden. Der offizielle Website-Link zur Bestellung Abschließende Worte ----------------------- Abschließend lässt sich sagen, dass KetoXBoom ein einzigartiges und effektives Nahrungsergänzungsmittel zur Gewichtsabnahme ist, das sich von der Konkurrenz abhebt, indem es sich auf die Regulierung der inneren Körpertemperatur konzentriert und einen umfassenden Ansatz zur Gewichtsabnahme bietet. Mit seinen natürlichen, aus den Alpen stammenden Inhaltsstoffen, Tausenden von zufriedenen Kunden und einer 60-Tage-Geld-zurück-Garantie ist KetoXBoom eine Überlegung wert, wenn Sie auf der Suche nach einer sicheren und effektiven Lösung sind, um Ihre Gewichtsabnahme zu unterstützen. ![.jpg)](URL ### Holen Sie sich KetoXBoom jetzt, solange es im Angebot ist - nur für begrenzte Zeit! URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL URL
[ "### Erhalten Sie verfügbaren Rabatt Preis auf der offiziellen Website\n\nWas sind KetoXBoom?\n-----------------------\n\nKetoXBoom ist ein innovatives Nahrungsergänzungsmittel, das zur Unterstützung einer schnellen Gewichtsabnahme entwickelt wurde. Diese Gummibärchen sind mit der Güte von Keto und Apfelessig versetzt, zwei kraftvollen Zutaten, die für ihre Fähigkeit bekannt sind, die Gewichtsabnahme zu fördern und die allgemeine Gesundheit zu verbessern. Die einzigartige Mischung der Inhaltsstoffe in Keto Gummies macht sie zu einer bequemen und köstlichen Möglichkeit, Ihren Weg zur Gewichtsabnahme zu unterstützen.\n\nWie funktioniert KetoXBoom?\n-------------------------------\n\nKetoXBoom kombiniert die Kraft von Keto und Apfelessig in einer praktischen Gummibärchenform. Diese Gummibärchen sind vollgepackt mit essentiellen Nähr- und Wirkstoffen, die Ihre Ziele bei der Gewichtsabnahme unterstützen. Durch die regelmäßige Einnahme von KetoXBoom können Sie Ihren Körper in Ketose versetzen und die Fettverbrennung beschleunigen. 
Die Gummies helfen außerdem, den Heißhunger zu zügeln, den Stoffwechsel anzukurbeln und die Verdauung zu verbessern, was das Abnehmen einfacher und angenehmer macht.\n\nVorteile von KetoXBoom\n--------------------------\n\nWir stellen Ihnen KetoXBoom vor, ein innovatives Nahrungsergänzungsmittel zur Gewichtsreduktion, das BHB (Beta-Hydroxybutyrat) mit Apfelessig kombiniert, um Ihnen zu helfen, mehr Fett zu verbrennen und Ihre allgemeine Gesundheit zu verbessern.\n\n\\- Die Gummies werden aus den besten Qualitätskomponenten hergestellt und sind professionell entwickelt worden, um synergetisch zu arbeiten, um Ihren Stoffwechsel anzukurbeln und bei der Gewichtsabnahme zu helfen.- Die Gummies enthalten außerdem eine spezielle Kombination aus notwendigen Vitaminen und Mineralien, die den ganzen Tag über ein anhaltendes Energieniveau aufrechterhalten, so dass die Verbraucher aktiv bleiben können, ohne Müdigkeit oder Schläfrigkeit zu verspüren.\n\n\\- Dies verringert den Heißhunger auf Kohlenhydrate oder Süßigkeiten und fördert gleichzeitig die Aufrechterhaltung einer positiven Stimmung während des Tages.\n\n\\- Neben der schnellen und effizienten Gewichtsabnahme bietet KetoXBoom zahlreiche weitere Vorteile, darunter eine verbesserte Konzentration und Aufmerksamkeit, mehr Energie, eine bessere Verdauung, einen besseren Schlaf, eine bessere Aufnahme von Nährstoffen aus Nahrungsmitteln und weniger durch oxidativen Stress bedingte Entzündungen im Körper.\n\n\\- Ein erhöhter Kalorienverbrauch bei sportlichen Aktivitäten aufgrund einer schnelleren Stoffwechselrate sowie eine verbesserte geistige Klarheit können das allgemeine Gesundheitsprofil erheblich verbessern.\n\n\\- Die Gummies kombinieren BHB (Beta-Hydroxybutyrat), das als Ketosalz bekannt ist und die Fettverbrennung steigert und die Gewichtsreduktion beschleunigt, mit Apfelessig, einem natürlichen Gesundheitsförderer. 
Diese Kombination von Nährstoffen, Vitaminen, Mineralien und Kräuterextrakten bietet einen schnellen und sicheren Weg, um in die Ketose zu gelangen. \nUm in den Genuss der Vorteile von KetoXBoom zu kommen, klicken Sie hier, um Ihren Vorrat jetzt zu bestellen!\n\n![.jpg)](URL", "### Holen Sie sich KetoXBoom jetzt, solange es im Angebot ist - nur für begrenzte Zeit!\n\nKetoXBoom Inhaltsstoffe und Zusammensetzung\n-----------------------------------------------\n\nNachdem das Produkt in unserem Test so erfolgreich war, wollten wir die genauen Inhaltsstoffe von KetoXBoom wissen. Es sind zwar nur 3 Inhaltsstoffe in den Kapseln, aber diese sind so genau aufeinander abgestimmt, dass im Grunde nur ein Erfolg eintreten kann. Die Kapseln enthalten die folgenden Inhaltsstoffe:\n\nLöwenzahn - Er ist reich an Antioxidantien, die bei der Entgiftung des Körpers helfen und die Hungerhormone stimulieren. Er hilft auch bei der Verbesserung der Verdauung, der Immunität und der Regulierung des Blutzuckerspiegels. Löwenzahntee wird auch zum effektiven und gesunden Fettabbau eingesetzt.\n\nGrüner Tee - Grüner Tee ist ein berühmtes Getränk, das potenziell dazu beiträgt, ein normales Körpergewicht zu halten und Fettleibigkeit auf signifikante Weise zu bekämpfen. Er bietet seinen Trinkern viele Vorteile und trägt zu einer verbesserten Hautgesundheit bei, fördert die geistige Konzentration und reduziert die Fettansammlung im Körper.\n\nCurryblätter - Es ist ein natürliches Kraut, das in vielen Gerichten verwendet wird und medizinische Eigenschaften hat. Die Extrakte aus Curryblättern haben das Potenzial, ungesunde Fettmoleküle anzugreifen und sie auf schnelle Weise zu schmelzen. Sie sind für eine schnelle Fettverbrennung bekannt.\n\nL-Arginin: Diese Aminosäure ist für den Muskelaufbau verantwortlich und fördert gleichzeitig die Muskelleistung. 
Athleten und Menschen, die schnell ermüden, profitieren von diesem Inhaltsstoff als optimale Unterstützung.\n\nGarcinia cambogia extract: Diese Pflanze stammt aus Südasien und kurbelt den Fettstoffwechsel an. Bei regelmäßiger Einnahme wird die Aufnahme von Fett gehemmt oder ganz blockiert. Der Extrakt dieser Pflanze gilt seit Jahren als gesundheitsfördernd und verlangsamt den Alterungsprozess.", "### Sonderangebot: Klicken Sie hier, um ein exklusives Rabattangebot für KetoXBoom zu erhalten\n\nWie verwende ich KetoXBoom zur Gewichtsabnahme?\n---------------------------------------------------\n\nDie Verwendung von KetoXBoom ist der Einnahme von Multivitaminen sehr ähnlich. Sie müssen eine Kapsel mit einem Glas Wasser einnehmen und sie sofort schlucken. Die Tagesdosis für dieses Produkt beträgt zwei Kapseln, jeweils eine Kapsel mit zwei großen Mahlzeiten des Tages. Am besten ist es, es mindestens eine halbe Stunde vor dem Essen einzunehmen. Auf diese Weise wird es aktiviert, bevor Sie die täglichen Kalorien zu sich nehmen, und die Diätkontrolle wird einfacher.\n\nBesteht ein Risiko?\n-----------------------\n\nDie Einnahme von Diaetoxi-Kapseln ist risikofrei und sicher in der Anwendung. Da dies eine ehrliche Bewertung von KetoXBoom Tabletten ist, werden wir Ihnen alles darüber erzählen. KetoXBoom wird weltweit verwendet und ist sehr beliebt bei Menschen, die effektiv abnehmen wollen. Bislang wurden von den Verbrauchern keine Risiken gemeldet. Aufgrund der natürlichen Inhaltsstoffe, die in KetoXBoom verwendet werden, steigt die Zahl der Konsumenten täglich stark an. Wie Sie wissen, durchläuft Ihr Körper während der Einnahme dieser Pillen den Prozess der Ketose, so dass Sie sich keine Sorgen um die Ergebnisse machen müssen. 
Die rein natürlichen Inhaltsstoffe haben auch das Vertrauen der Anwender gewonnen.\n\nWas ist der Preis von KetoXBoom?\n------------------------------------\n\nKetoXBoom ist ein leistungsfähiges Nahrungsergänzungsmittel, das nur auf seiner offiziellen Website erhältlich ist.Obwohl das Präparat $199 kostet, bieten die Hersteller einen zeitlich begrenzten Rabatt an, um die Benutzer zu unterstützen.Dies ist, wie die Ergänzung ist preislich.\n\nSparpaket - 59,90 € + 4,90 € Versandkosten\n\nBest-Buy-Paket - 29,90 € Preis pro Packung\n\nVorteilspaket - 36,60 € Preis pro Packung\n\nDiese Angebote gelten nicht mehr lange, da es schwierig ist, die Nährstoffe aus dem Himalaya zu beschaffen.Daher können Sie diese Ergänzung zu diesen Preisen nur bestellen, solange der Vorrat reicht.\n\n![.png)](URL", "### \"KetoXBoom\" (Klicken Sie hier für das beste verfügbare Angebot)\n\nWo kann ich KetoXBoom zum besten Preis online kaufen?\n---------------------------------------------------------\n\nKetoXBoom ist ausschließlich auf der offiziellen Website erhältlich.Aufgrund der gestiegenen Nachfrage wird berichtet, dass Websites von Drittanbietern versuchen, eine Imitation des Originalprodukts anzubieten.Es ist am besten, KetoXBoom nur auf der offiziellen Website zu kaufen, um zu verhindern, dass Sie von diesen nicht autorisierten Verkäufern betrogen werden. Der offizielle Website-Link zur Bestellung\n\nAbschließende Worte\n-----------------------\n\nAbschließend lässt sich sagen, dass KetoXBoom ein einzigartiges und effektives Nahrungsergänzungsmittel zur Gewichtsabnahme ist, das sich von der Konkurrenz abhebt, indem es sich auf die Regulierung der inneren Körpertemperatur konzentriert und einen umfassenden Ansatz zur Gewichtsabnahme bietet. 
Mit seinen natürlichen, aus den Alpen stammenden Inhaltsstoffen, Tausenden von zufriedenen Kunden und einer 60-Tage-Geld-zurück-Garantie ist KetoXBoom eine Überlegung wert, wenn Sie auf der Suche nach einer sicheren und effektiven Lösung sind, um Ihre Gewichtsabnahme zu unterstützen.\n\n![.jpg)](URL", "### Holen Sie sich KetoXBoom jetzt, solange es im Angebot ist - nur für begrenzte Zeit!\n\nURL\n\nURL\n\nURL\n\nURL\n\nURL\n\nURL\n\nURL\n\nURL\n\nURL\n\nURL\n\nURL\n\nURL\n\nURL\n\nURL\n\nURL\n\nURL\n\nURL\n\nURL\n\nURL" ]
[ "TAGS\n#region-us \n", "### Erhalten Sie verfügbaren Rabatt Preis auf der offiziellen Website\n\nWas sind KetoXBoom?\n-----------------------\n\nKetoXBoom ist ein innovatives Nahrungsergänzungsmittel, das zur Unterstützung einer schnellen Gewichtsabnahme entwickelt wurde. Diese Gummibärchen sind mit der Güte von Keto und Apfelessig versetzt, zwei kraftvollen Zutaten, die für ihre Fähigkeit bekannt sind, die Gewichtsabnahme zu fördern und die allgemeine Gesundheit zu verbessern. Die einzigartige Mischung der Inhaltsstoffe in Keto Gummies macht sie zu einer bequemen und köstlichen Möglichkeit, Ihren Weg zur Gewichtsabnahme zu unterstützen.\n\nWie funktioniert KetoXBoom?\n-------------------------------\n\nKetoXBoom kombiniert die Kraft von Keto und Apfelessig in einer praktischen Gummibärchenform. Diese Gummibärchen sind vollgepackt mit essentiellen Nähr- und Wirkstoffen, die Ihre Ziele bei der Gewichtsabnahme unterstützen. Durch die regelmäßige Einnahme von KetoXBoom können Sie Ihren Körper in Ketose versetzen und die Fettverbrennung beschleunigen. 
Die Gummies helfen außerdem, den Heißhunger zu zügeln, den Stoffwechsel anzukurbeln und die Verdauung zu verbessern, was das Abnehmen einfacher und angenehmer macht.\n\nVorteile von KetoXBoom\n--------------------------\n\nWir stellen Ihnen KetoXBoom vor, ein innovatives Nahrungsergänzungsmittel zur Gewichtsreduktion, das BHB (Beta-Hydroxybutyrat) mit Apfelessig kombiniert, um Ihnen zu helfen, mehr Fett zu verbrennen und Ihre allgemeine Gesundheit zu verbessern.\n\n\\- Die Gummies werden aus den besten Qualitätskomponenten hergestellt und sind professionell entwickelt worden, um synergetisch zu arbeiten, um Ihren Stoffwechsel anzukurbeln und bei der Gewichtsabnahme zu helfen.- Die Gummies enthalten außerdem eine spezielle Kombination aus notwendigen Vitaminen und Mineralien, die den ganzen Tag über ein anhaltendes Energieniveau aufrechterhalten, so dass die Verbraucher aktiv bleiben können, ohne Müdigkeit oder Schläfrigkeit zu verspüren.\n\n\\- Dies verringert den Heißhunger auf Kohlenhydrate oder Süßigkeiten und fördert gleichzeitig die Aufrechterhaltung einer positiven Stimmung während des Tages.\n\n\\- Neben der schnellen und effizienten Gewichtsabnahme bietet KetoXBoom zahlreiche weitere Vorteile, darunter eine verbesserte Konzentration und Aufmerksamkeit, mehr Energie, eine bessere Verdauung, einen besseren Schlaf, eine bessere Aufnahme von Nährstoffen aus Nahrungsmitteln und weniger durch oxidativen Stress bedingte Entzündungen im Körper.\n\n\\- Ein erhöhter Kalorienverbrauch bei sportlichen Aktivitäten aufgrund einer schnelleren Stoffwechselrate sowie eine verbesserte geistige Klarheit können das allgemeine Gesundheitsprofil erheblich verbessern.\n\n\\- Die Gummies kombinieren BHB (Beta-Hydroxybutyrat), das als Ketosalz bekannt ist und die Fettverbrennung steigert und die Gewichtsreduktion beschleunigt, mit Apfelessig, einem natürlichen Gesundheitsförderer. 
Diese Kombination von Nährstoffen, Vitaminen, Mineralien und Kräuterextrakten bietet einen schnellen und sicheren Weg, um in die Ketose zu gelangen. \nUm in den Genuss der Vorteile von KetoXBoom zu kommen, klicken Sie hier, um Ihren Vorrat jetzt zu bestellen!\n\n![.jpg)](URL", "### Holen Sie sich KetoXBoom jetzt, solange es im Angebot ist - nur für begrenzte Zeit!\n\nKetoXBoom Inhaltsstoffe und Zusammensetzung\n-----------------------------------------------\n\nNachdem das Produkt in unserem Test so erfolgreich war, wollten wir die genauen Inhaltsstoffe von KetoXBoom wissen. Es sind zwar nur 3 Inhaltsstoffe in den Kapseln, aber diese sind so genau aufeinander abgestimmt, dass im Grunde nur ein Erfolg eintreten kann. Die Kapseln enthalten die folgenden Inhaltsstoffe:\n\nLöwenzahn - Er ist reich an Antioxidantien, die bei der Entgiftung des Körpers helfen und die Hungerhormone stimulieren. Er hilft auch bei der Verbesserung der Verdauung, der Immunität und der Regulierung des Blutzuckerspiegels. Löwenzahntee wird auch zum effektiven und gesunden Fettabbau eingesetzt.\n\nGrüner Tee - Grüner Tee ist ein berühmtes Getränk, das potenziell dazu beiträgt, ein normales Körpergewicht zu halten und Fettleibigkeit auf signifikante Weise zu bekämpfen. Er bietet seinen Trinkern viele Vorteile und trägt zu einer verbesserten Hautgesundheit bei, fördert die geistige Konzentration und reduziert die Fettansammlung im Körper.\n\nCurryblätter - Es ist ein natürliches Kraut, das in vielen Gerichten verwendet wird und medizinische Eigenschaften hat. Die Extrakte aus Curryblättern haben das Potenzial, ungesunde Fettmoleküle anzugreifen und sie auf schnelle Weise zu schmelzen. Sie sind für eine schnelle Fettverbrennung bekannt.\n\nL-Arginin: Diese Aminosäure ist für den Muskelaufbau verantwortlich und fördert gleichzeitig die Muskelleistung. 
Athleten und Menschen, die schnell ermüden, profitieren von diesem Inhaltsstoff als optimale Unterstützung.\n\nGarcinia cambogia extract: Diese Pflanze stammt aus Südasien und kurbelt den Fettstoffwechsel an. Bei regelmäßiger Einnahme wird die Aufnahme von Fett gehemmt oder ganz blockiert. Der Extrakt dieser Pflanze gilt seit Jahren als gesundheitsfördernd und verlangsamt den Alterungsprozess.", "### Sonderangebot: Klicken Sie hier, um ein exklusives Rabattangebot für KetoXBoom zu erhalten\n\nWie verwende ich KetoXBoom zur Gewichtsabnahme?\n---------------------------------------------------\n\nDie Verwendung von KetoXBoom ist der Einnahme von Multivitaminen sehr ähnlich. Sie müssen eine Kapsel mit einem Glas Wasser einnehmen und sie sofort schlucken. Die Tagesdosis für dieses Produkt beträgt zwei Kapseln, jeweils eine Kapsel mit zwei großen Mahlzeiten des Tages. Am besten ist es, es mindestens eine halbe Stunde vor dem Essen einzunehmen. Auf diese Weise wird es aktiviert, bevor Sie die täglichen Kalorien zu sich nehmen, und die Diätkontrolle wird einfacher.\n\nBesteht ein Risiko?\n-----------------------\n\nDie Einnahme von Diaetoxi-Kapseln ist risikofrei und sicher in der Anwendung. Da dies eine ehrliche Bewertung von KetoXBoom Tabletten ist, werden wir Ihnen alles darüber erzählen. KetoXBoom wird weltweit verwendet und ist sehr beliebt bei Menschen, die effektiv abnehmen wollen. Bislang wurden von den Verbrauchern keine Risiken gemeldet. Aufgrund der natürlichen Inhaltsstoffe, die in KetoXBoom verwendet werden, steigt die Zahl der Konsumenten täglich stark an. Wie Sie wissen, durchläuft Ihr Körper während der Einnahme dieser Pillen den Prozess der Ketose, so dass Sie sich keine Sorgen um die Ergebnisse machen müssen. 
Die rein natürlichen Inhaltsstoffe haben auch das Vertrauen der Anwender gewonnen.\n\nWas ist der Preis von KetoXBoom?\n------------------------------------\n\nKetoXBoom ist ein leistungsfähiges Nahrungsergänzungsmittel, das nur auf seiner offiziellen Website erhältlich ist.Obwohl das Präparat $199 kostet, bieten die Hersteller einen zeitlich begrenzten Rabatt an, um die Benutzer zu unterstützen.Dies ist, wie die Ergänzung ist preislich.\n\nSparpaket - 59,90 € + 4,90 € Versandkosten\n\nBest-Buy-Paket - 29,90 € Preis pro Packung\n\nVorteilspaket - 36,60 € Preis pro Packung\n\nDiese Angebote gelten nicht mehr lange, da es schwierig ist, die Nährstoffe aus dem Himalaya zu beschaffen.Daher können Sie diese Ergänzung zu diesen Preisen nur bestellen, solange der Vorrat reicht.\n\n![.png)](URL", "### \"KetoXBoom\" (Klicken Sie hier für das beste verfügbare Angebot)\n\nWo kann ich KetoXBoom zum besten Preis online kaufen?\n---------------------------------------------------------\n\nKetoXBoom ist ausschließlich auf der offiziellen Website erhältlich.Aufgrund der gestiegenen Nachfrage wird berichtet, dass Websites von Drittanbietern versuchen, eine Imitation des Originalprodukts anzubieten.Es ist am besten, KetoXBoom nur auf der offiziellen Website zu kaufen, um zu verhindern, dass Sie von diesen nicht autorisierten Verkäufern betrogen werden. Der offizielle Website-Link zur Bestellung\n\nAbschließende Worte\n-----------------------\n\nAbschließend lässt sich sagen, dass KetoXBoom ein einzigartiges und effektives Nahrungsergänzungsmittel zur Gewichtsabnahme ist, das sich von der Konkurrenz abhebt, indem es sich auf die Regulierung der inneren Körpertemperatur konzentriert und einen umfassenden Ansatz zur Gewichtsabnahme bietet. 
Mit seinen natürlichen, aus den Alpen stammenden Inhaltsstoffen, Tausenden von zufriedenen Kunden und einer 60-Tage-Geld-zurück-Garantie ist KetoXBoom eine Überlegung wert, wenn Sie auf der Suche nach einer sicheren und effektiven Lösung sind, um Ihre Gewichtsabnahme zu unterstützen.\n\n![.jpg)](URL", "### Holen Sie sich KetoXBoom jetzt, solange es im Angebot ist - nur für begrenzte Zeit!\n\nURL\n\nURL\n\nURL\n\nURL\n\nURL\n\nURL\n\nURL\n\nURL\n\nURL\n\nURL\n\nURL\n\nURL\n\nURL\n\nURL\n\nURL\n\nURL\n\nURL\n\nURL\n\nURL" ]
[ 6, 753, 455, 492, 284, 45 ]
[ "passage: TAGS\n#region-us \n", "passage: ### Erhalten Sie verfügbaren Rabatt Preis auf der offiziellen Website\n\nWas sind KetoXBoom?\n-----------------------\n\nKetoXBoom ist ein innovatives Nahrungsergänzungsmittel, das zur Unterstützung einer schnellen Gewichtsabnahme entwickelt wurde. Diese Gummibärchen sind mit der Güte von Keto und Apfelessig versetzt, zwei kraftvollen Zutaten, die für ihre Fähigkeit bekannt sind, die Gewichtsabnahme zu fördern und die allgemeine Gesundheit zu verbessern. Die einzigartige Mischung der Inhaltsstoffe in Keto Gummies macht sie zu einer bequemen und köstlichen Möglichkeit, Ihren Weg zur Gewichtsabnahme zu unterstützen.\n\nWie funktioniert KetoXBoom?\n-------------------------------\n\nKetoXBoom kombiniert die Kraft von Keto und Apfelessig in einer praktischen Gummibärchenform. Diese Gummibärchen sind vollgepackt mit essentiellen Nähr- und Wirkstoffen, die Ihre Ziele bei der Gewichtsabnahme unterstützen. Durch die regelmäßige Einnahme von KetoXBoom können Sie Ihren Körper in Ketose versetzen und die Fettverbrennung beschleunigen. 
Die Gummies helfen außerdem, den Heißhunger zu zügeln, den Stoffwechsel anzukurbeln und die Verdauung zu verbessern, was das Abnehmen einfacher und angenehmer macht.\n\nVorteile von KetoXBoom\n--------------------------\n\nWir stellen Ihnen KetoXBoom vor, ein innovatives Nahrungsergänzungsmittel zur Gewichtsreduktion, das BHB (Beta-Hydroxybutyrat) mit Apfelessig kombiniert, um Ihnen zu helfen, mehr Fett zu verbrennen und Ihre allgemeine Gesundheit zu verbessern.\n\n\\- Die Gummies werden aus den besten Qualitätskomponenten hergestellt und sind professionell entwickelt worden, um synergetisch zu arbeiten, um Ihren Stoffwechsel anzukurbeln und bei der Gewichtsabnahme zu helfen.- Die Gummies enthalten außerdem eine spezielle Kombination aus notwendigen Vitaminen und Mineralien, die den ganzen Tag über ein anhaltendes Energieniveau aufrechterhalten, so dass die Verbraucher aktiv bleiben können, ohne Müdigkeit oder Schläfrigkeit zu verspüren.\n\n\\- Dies verringert den Heißhunger auf Kohlenhydrate oder Süßigkeiten und fördert gleichzeitig die Aufrechterhaltung einer positiven Stimmung während des Tages.\n\n\\- Neben der schnellen und effizienten Gewichtsabnahme bietet KetoXBoom zahlreiche weitere Vorteile, darunter eine verbesserte Konzentration und Aufmerksamkeit, mehr Energie, eine bessere Verdauung, einen besseren Schlaf, eine bessere Aufnahme von Nährstoffen aus Nahrungsmitteln und weniger durch oxidativen Stress bedingte Entzündungen im Körper.\n\n\\- Ein erhöhter Kalorienverbrauch bei sportlichen Aktivitäten aufgrund einer schnelleren Stoffwechselrate sowie eine verbesserte geistige Klarheit können das allgemeine Gesundheitsprofil erheblich verbessern.\n\n\\- Die Gummies kombinieren BHB (Beta-Hydroxybutyrat), das als Ketosalz bekannt ist und die Fettverbrennung steigert und die Gewichtsreduktion beschleunigt, mit Apfelessig, einem natürlichen Gesundheitsförderer. 
Diese Kombination von Nährstoffen, Vitaminen, Mineralien und Kräuterextrakten bietet einen schnellen und sicheren Weg, um in die Ketose zu gelangen. \nUm in den Genuss der Vorteile von KetoXBoom zu kommen, klicken Sie hier, um Ihren Vorrat jetzt zu bestellen!\n\n![.jpg)](URL### Holen Sie sich KetoXBoom jetzt, solange es im Angebot ist - nur für begrenzte Zeit!\n\nKetoXBoom Inhaltsstoffe und Zusammensetzung\n-----------------------------------------------\n\nNachdem das Produkt in unserem Test so erfolgreich war, wollten wir die genauen Inhaltsstoffe von KetoXBoom wissen. Es sind zwar nur 3 Inhaltsstoffe in den Kapseln, aber diese sind so genau aufeinander abgestimmt, dass im Grunde nur ein Erfolg eintreten kann. Die Kapseln enthalten die folgenden Inhaltsstoffe:\n\nLöwenzahn - Er ist reich an Antioxidantien, die bei der Entgiftung des Körpers helfen und die Hungerhormone stimulieren. Er hilft auch bei der Verbesserung der Verdauung, der Immunität und der Regulierung des Blutzuckerspiegels. Löwenzahntee wird auch zum effektiven und gesunden Fettabbau eingesetzt.\n\nGrüner Tee - Grüner Tee ist ein berühmtes Getränk, das potenziell dazu beiträgt, ein normales Körpergewicht zu halten und Fettleibigkeit auf signifikante Weise zu bekämpfen. Er bietet seinen Trinkern viele Vorteile und trägt zu einer verbesserten Hautgesundheit bei, fördert die geistige Konzentration und reduziert die Fettansammlung im Körper.\n\nCurryblätter - Es ist ein natürliches Kraut, das in vielen Gerichten verwendet wird und medizinische Eigenschaften hat. Die Extrakte aus Curryblättern haben das Potenzial, ungesunde Fettmoleküle anzugreifen und sie auf schnelle Weise zu schmelzen. Sie sind für eine schnelle Fettverbrennung bekannt.\n\nL-Arginin: Diese Aminosäure ist für den Muskelaufbau verantwortlich und fördert gleichzeitig die Muskelleistung. 
Athleten und Menschen, die schnell ermüden, profitieren von diesem Inhaltsstoff als optimale Unterstützung.\n\nGarcinia cambogia extract: Diese Pflanze stammt aus Südasien und kurbelt den Fettstoffwechsel an. Bei regelmäßiger Einnahme wird die Aufnahme von Fett gehemmt oder ganz blockiert. Der Extrakt dieser Pflanze gilt seit Jahren als gesundheitsfördernd und verlangsamt den Alterungsprozess." ]
81e583eac422c659f7c10b2a348fa9138fa0707f
This dataset is comprised of: 1. Crawled content that is machine translated from Cantonese to Simplified Chinese. 2. machine translated articlse from zh-yue.wikipedia.org 3. [botisan-ai/cantonese-mandarin-translations](https://huggingface.co/datasets/botisan-ai/cantonese-mandarin-translations) 4. [AlienKevin/LIHKG](https://huggingface.co/datasets/AlienKevin/LIHKG)
indiejoseph/yue-zh-translation
[ "task_categories:translation", "size_categories:10K<n<100K", "language:yue", "language:zh", "license:cc-by-4.0", "region:us" ]
2023-08-28T09:19:35+00:00
{"language": ["yue", "zh"], "license": "cc-by-4.0", "size_categories": ["10K<n<100K"], "task_categories": ["translation"], "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "test", "path": "data/test-*"}]}], "dataset_info": {"features": [{"name": "translation", "struct": [{"name": "yue", "dtype": "string"}, {"name": "zh", "dtype": "string"}]}], "splits": [{"name": "train", "num_bytes": 16446012, "num_examples": 169949}, {"name": "test", "num_bytes": 4107525, "num_examples": 42361}], "download_size": 15755469, "dataset_size": 20553537}}
2023-10-08T19:52:38+00:00
[]
[ "yue", "zh" ]
TAGS #task_categories-translation #size_categories-10K<n<100K #language-Yue Chinese #language-Chinese #license-cc-by-4.0 #region-us
This dataset is comprised of: 1. Crawled content that is machine translated from Cantonese to Simplified Chinese. 2. machine translated articlse from URL 3. botisan-ai/cantonese-mandarin-translations 4. AlienKevin/LIHKG
[]
[ "TAGS\n#task_categories-translation #size_categories-10K<n<100K #language-Yue Chinese #language-Chinese #license-cc-by-4.0 #region-us \n" ]
[ 47 ]
[ "passage: TAGS\n#task_categories-translation #size_categories-10K<n<100K #language-Yue Chinese #language-Chinese #license-cc-by-4.0 #region-us \n" ]
a7ef5f447d326483eadb6a314518824acd9c81cb
# Dataset Card for "test_ds_2gpu_axolotl_bug" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
jphme/test_ds_2gpu_axolotl_bug
[ "region:us" ]
2023-08-28T09:21:47+00:00
{"configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}], "dataset_info": {"features": [{"name": "instruction", "dtype": "string"}, {"name": "input", "dtype": "string"}, {"name": "output", "dtype": "string"}, {"name": "text", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 559356, "num_examples": 100}], "download_size": 591450, "dataset_size": 559356}}
2023-08-28T10:39:36+00:00
[]
[]
TAGS #region-us
# Dataset Card for "test_ds_2gpu_axolotl_bug" More Information needed
[ "# Dataset Card for \"test_ds_2gpu_axolotl_bug\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"test_ds_2gpu_axolotl_bug\"\n\nMore Information needed" ]
[ 6, 24 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"test_ds_2gpu_axolotl_bug\"\n\nMore Information needed" ]
015c78f8615ab2e4bab03d5c74036ce91d3aa2e5
# Dataset of surtr (Arknights) This is the dataset of surtr (Arknights), containing 20 images and their tags. Images are crawled from many sites (e.g. danbooru, pixiv, zerochan ...), the auto-crawling system is powered by [DeepGHS Team](https://github.com/deepghs)([huggingface organization](https://huggingface.co/deepghs)). | Name | Images | Download | Description | |:------------|---------:|:-------------------------------------|:-------------------------------------------------------------------------| | raw | 20 | ![Download](dataset-raw.zip) | Raw data with meta information. | | raw-stage3 | 48 | ![Download](dataset-raw-stage3.zip) | 3-stage cropped raw data with meta information. | | 384x512 | 20 | ![Download](dataset-384x512.zip) | 384x512 aligned dataset. | | 512x512 | 20 | ![Download](dataset-512x512.zip) | 512x512 aligned dataset. | | 512x704 | 20 | ![Download](dataset-512x704.zip) | 512x704 aligned dataset. | | 640x640 | 20 | ![Download](dataset-640x640.zip) | 640x640 aligned dataset. | | 640x880 | 20 | ![Download](dataset-640x880.zip) | 640x880 aligned dataset. | | stage3-640 | 48 | ![Download](dataset-stage3-640.zip) | 3-stage cropped dataset with the shorter side not exceeding 640 pixels. | | stage3-800 | 48 | ![Download](dataset-stage3-800.zip) | 3-stage cropped dataset with the shorter side not exceeding 800 pixels. | | stage3-1200 | 48 | ![Download](dataset-stage3-1200.zip) | 3-stage cropped dataset with the shorter side not exceeding 1200 pixels. |
narugo/test_harem_v1.4
[ "task_categories:text-to-image", "size_categories:n<1K", "license:mit", "art", "region:us" ]
2023-08-28T09:47:06+00:00
{"license": "mit", "size_categories": ["n<1K"], "task_categories": ["text-to-image"], "tags": ["art"]}
2023-08-28T09:52:21+00:00
[]
[]
TAGS #task_categories-text-to-image #size_categories-n<1K #license-mit #art #region-us
Dataset of surtr (Arknights) ============================ This is the dataset of surtr (Arknights), containing 20 images and their tags. Images are crawled from many sites (e.g. danbooru, pixiv, zerochan ...), the auto-crawling system is powered by DeepGHS Team(huggingface organization).
[]
[ "TAGS\n#task_categories-text-to-image #size_categories-n<1K #license-mit #art #region-us \n" ]
[ 35 ]
[ "passage: TAGS\n#task_categories-text-to-image #size_categories-n<1K #license-mit #art #region-us \n" ]
2af2a19d532b21b1d1cee453e06bf6a59b5b0276
## Dataset Details **Dataset Type:** Japanese LLaVA Instruct 150K is a localized version of the original LLaVA Visual Instruct 150K dataset. This version is translated into Japanese using DeepL API and is aimed at serving similar purposes in the context of Japanese language. **Resources for More Information:** For information on the original dataset: [LLaVA Visual Instruct 150K](https://llava-vl.github.io/) **License:** Attribution-NonCommercial 4.0 International (CC BY-NC-4.0) The dataset should abide by the policy of OpenAI: [OpenAI Terms of Use](https://openai.com/policies/terms-of-use) **Questions or Comments:** For questions or comments about the original model, you can go to [LLaVA GitHub Issues](https://github.com/haotian-liu/LLaVA/issues). ## Intended Use **Primary Intended Uses:** The primary use of this translated dataset is research on large multimodal models and chatbots in a Japanese context. **Primary Intended Users:** The primary intended users are researchers and hobbyists interested in computer vision, natural language processing, machine learning, and artificial intelligence, particularly those focusing on the Japanese language. --- **Note:** This dataset is a translation of the original LLaVA Visual Instruct 150K, carried out using the DeepL API. The license remains the same as the original dataset, Attribution-NonCommercial 4.0 International (CC BY-NC-4.0). ---
turing-motors/LLaVA-Instruct-150K-JA
[ "task_categories:visual-question-answering", "task_categories:question-answering", "size_categories:100K<n<1M", "language:ja", "license:cc-by-nc-4.0", "region:us" ]
2023-08-28T09:50:24+00:00
{"language": ["ja"], "license": "cc-by-nc-4.0", "size_categories": ["100K<n<1M"], "task_categories": ["visual-question-answering", "question-answering"], "pretty_name": "Japanese LLaVA Visual Instruct 150K"}
2023-08-28T10:26:23+00:00
[]
[ "ja" ]
TAGS #task_categories-visual-question-answering #task_categories-question-answering #size_categories-100K<n<1M #language-Japanese #license-cc-by-nc-4.0 #region-us
## Dataset Details Dataset Type: Japanese LLaVA Instruct 150K is a localized version of the original LLaVA Visual Instruct 150K dataset. This version is translated into Japanese using DeepL API and is aimed at serving similar purposes in the context of Japanese language. Resources for More Information: For information on the original dataset: LLaVA Visual Instruct 150K License: Attribution-NonCommercial 4.0 International (CC BY-NC-4.0) The dataset should abide by the policy of OpenAI: OpenAI Terms of Use Questions or Comments: For questions or comments about the original model, you can go to LLaVA GitHub Issues. ## Intended Use Primary Intended Uses: The primary use of this translated dataset is research on large multimodal models and chatbots in a Japanese context. Primary Intended Users: The primary intended users are researchers and hobbyists interested in computer vision, natural language processing, machine learning, and artificial intelligence, particularly those focusing on the Japanese language. --- Note: This dataset is a translation of the original LLaVA Visual Instruct 150K, carried out using the DeepL API. The license remains the same as the original dataset, Attribution-NonCommercial 4.0 International (CC BY-NC-4.0). ---
[ "## Dataset Details\n\nDataset Type: \nJapanese LLaVA Instruct 150K is a localized version of the original LLaVA Visual Instruct 150K dataset. This version is translated into Japanese using DeepL API and is aimed at serving similar purposes in the context of Japanese language.\n\nResources for More Information: \nFor information on the original dataset: LLaVA Visual Instruct 150K\n\nLicense: \nAttribution-NonCommercial 4.0 International (CC BY-NC-4.0) \nThe dataset should abide by the policy of OpenAI: OpenAI Terms of Use\n\nQuestions or Comments: \nFor questions or comments about the original model, you can go to LLaVA GitHub Issues.", "## Intended Use\n\nPrimary Intended Uses: \nThe primary use of this translated dataset is research on large multimodal models and chatbots in a Japanese context.\n\nPrimary Intended Users: \nThe primary intended users are researchers and hobbyists interested in computer vision, natural language processing, machine learning, and artificial intelligence, particularly those focusing on the Japanese language.\n\n---\n\nNote: This dataset is a translation of the original LLaVA Visual Instruct 150K, carried out using the DeepL API. The license remains the same as the original dataset, Attribution-NonCommercial 4.0 International (CC BY-NC-4.0).\n\n---" ]
[ "TAGS\n#task_categories-visual-question-answering #task_categories-question-answering #size_categories-100K<n<1M #language-Japanese #license-cc-by-nc-4.0 #region-us \n", "## Dataset Details\n\nDataset Type: \nJapanese LLaVA Instruct 150K is a localized version of the original LLaVA Visual Instruct 150K dataset. This version is translated into Japanese using DeepL API and is aimed at serving similar purposes in the context of Japanese language.\n\nResources for More Information: \nFor information on the original dataset: LLaVA Visual Instruct 150K\n\nLicense: \nAttribution-NonCommercial 4.0 International (CC BY-NC-4.0) \nThe dataset should abide by the policy of OpenAI: OpenAI Terms of Use\n\nQuestions or Comments: \nFor questions or comments about the original model, you can go to LLaVA GitHub Issues.", "## Intended Use\n\nPrimary Intended Uses: \nThe primary use of this translated dataset is research on large multimodal models and chatbots in a Japanese context.\n\nPrimary Intended Users: \nThe primary intended users are researchers and hobbyists interested in computer vision, natural language processing, machine learning, and artificial intelligence, particularly those focusing on the Japanese language.\n\n---\n\nNote: This dataset is a translation of the original LLaVA Visual Instruct 150K, carried out using the DeepL API. The license remains the same as the original dataset, Attribution-NonCommercial 4.0 International (CC BY-NC-4.0).\n\n---" ]
[ 62, 149, 143 ]
[ "passage: TAGS\n#task_categories-visual-question-answering #task_categories-question-answering #size_categories-100K<n<1M #language-Japanese #license-cc-by-nc-4.0 #region-us \n## Dataset Details\n\nDataset Type: \nJapanese LLaVA Instruct 150K is a localized version of the original LLaVA Visual Instruct 150K dataset. This version is translated into Japanese using DeepL API and is aimed at serving similar purposes in the context of Japanese language.\n\nResources for More Information: \nFor information on the original dataset: LLaVA Visual Instruct 150K\n\nLicense: \nAttribution-NonCommercial 4.0 International (CC BY-NC-4.0) \nThe dataset should abide by the policy of OpenAI: OpenAI Terms of Use\n\nQuestions or Comments: \nFor questions or comments about the original model, you can go to LLaVA GitHub Issues.## Intended Use\n\nPrimary Intended Uses: \nThe primary use of this translated dataset is research on large multimodal models and chatbots in a Japanese context.\n\nPrimary Intended Users: \nThe primary intended users are researchers and hobbyists interested in computer vision, natural language processing, machine learning, and artificial intelligence, particularly those focusing on the Japanese language.\n\n---\n\nNote: This dataset is a translation of the original LLaVA Visual Instruct 150K, carried out using the DeepL API. The license remains the same as the original dataset, Attribution-NonCommercial 4.0 International (CC BY-NC-4.0).\n\n---" ]
e5a8287eac8623cddde23d660a02ed16a516f436
# Dataset Card for "HandDrawnID_processed_V1" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
temasarkisov/HandDrawnID_processed_V1
[ "region:us" ]
2023-08-28T09:58:20+00:00
{"dataset_info": {"features": [{"name": "image", "dtype": "image"}, {"name": "text", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 3734766.0, "num_examples": 47}], "download_size": 3734638, "dataset_size": 3734766.0}}
2023-08-28T09:58:23+00:00
[]
[]
TAGS #region-us
# Dataset Card for "HandDrawnID_processed_V1" More Information needed
[ "# Dataset Card for \"HandDrawnID_processed_V1\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"HandDrawnID_processed_V1\"\n\nMore Information needed" ]
[ 6, 21 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"HandDrawnID_processed_V1\"\n\nMore Information needed" ]
f23c0e21f6fe222b4429272a86be00e4583b0298
# Dataset Card for "ffhq-256" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
merkol/ffhq-256
[ "region:us" ]
2023-08-28T10:17:10+00:00
{"dataset_info": {"features": [{"name": "image", "dtype": "image"}], "splits": [{"name": "train", "num_bytes": 7358464050.0, "num_examples": 70000}], "download_size": 7407340570, "dataset_size": 7358464050.0}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]}
2023-08-28T10:26:44+00:00
[]
[]
TAGS #region-us
# Dataset Card for "ffhq-256" More Information needed
[ "# Dataset Card for \"ffhq-256\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"ffhq-256\"\n\nMore Information needed" ]
[ 6, 15 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"ffhq-256\"\n\nMore Information needed" ]
6ccef35703378dd653db0e6c0b72cf6fee9f09af
# Dataset Card for "soict_train_dataset_with_WER" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
quocanh34/soict_train_dataset_with_WER
[ "region:us" ]
2023-08-28T10:25:54+00:00
{"dataset_info": {"features": [{"name": "id", "dtype": "string"}, {"name": "sentence", "dtype": "string"}, {"name": "intent", "dtype": "string"}, {"name": "sentence_annotation", "dtype": "string"}, {"name": "entities", "list": [{"name": "type", "dtype": "string"}, {"name": "filler", "dtype": "string"}]}, {"name": "file", "dtype": "string"}, {"name": "audio", "struct": [{"name": "array", "sequence": "float64"}, {"name": "path", "dtype": "string"}, {"name": "sampling_rate", "dtype": "int64"}]}, {"name": "origin_transcription", "dtype": "string"}, {"name": "sentence_norm", "dtype": "string"}, {"name": "w2v2_base_5grams_transcription", "dtype": "string"}, {"name": "w2v2_large_5grams_transcription", "dtype": "string"}, {"name": "norm_w2v2_base", "dtype": "string"}, {"name": "norm_w2v2_large", "dtype": "string"}, {"name": "WER_base", "dtype": "int64"}, {"name": "WER_large", "dtype": "int64"}], "splits": [{"name": "train", "num_bytes": 3482244499, "num_examples": 6722}], "download_size": 825637294, "dataset_size": 3482244499}}
2023-08-28T10:29:07+00:00
[]
[]
TAGS #region-us
# Dataset Card for "soict_train_dataset_with_WER" More Information needed
[ "# Dataset Card for \"soict_train_dataset_with_WER\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"soict_train_dataset_with_WER\"\n\nMore Information needed" ]
[ 6, 23 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"soict_train_dataset_with_WER\"\n\nMore Information needed" ]
407f63301694677dcb5cbd16ec74487d1884b368
# Dataset of Ithea Myse Valgulious This is the dataset of Ithea Myse Valgulious, containing 38 images and their tags. Images are crawled from many sites (e.g. danbooru, pixiv, zerochan ...), the auto-crawling system is powered by [DeepGHS Team](https://github.com/deepghs)([huggingface organization](https://huggingface.co/deepghs)). | Name | Images | Download | Description | |:------------|---------:|:------------------------------------|:-------------------------------------------------------------------------| | raw | 38 | [Download](dataset-raw.zip) | Raw data with meta information. | | raw-stage3 | 87 | [Download](dataset-raw-stage3.zip) | 3-stage cropped raw data with meta information. | | 384x512 | 38 | [Download](dataset-384x512.zip) | 384x512 aligned dataset. | | 512x512 | 38 | [Download](dataset-512x512.zip) | 512x512 aligned dataset. | | 512x704 | 38 | [Download](dataset-512x704.zip) | 512x704 aligned dataset. | | 640x640 | 38 | [Download](dataset-640x640.zip) | 640x640 aligned dataset. | | 640x880 | 38 | [Download](dataset-640x880.zip) | 640x880 aligned dataset. | | stage3-640 | 87 | [Download](dataset-stage3-640.zip) | 3-stage cropped dataset with the shorter side not exceeding 640 pixels. | | stage3-800 | 87 | [Download](dataset-stage3-800.zip) | 3-stage cropped dataset with the shorter side not exceeding 800 pixels. | | stage3-1200 | 87 | [Download](dataset-stage3-1200.zip) | 3-stage cropped dataset with the shorter side not exceeding 1200 pixels. |
CyberHarem/ithea_sukasuka
[ "task_categories:text-to-image", "size_categories:n<1K", "license:mit", "art", "not-for-all-audiences", "region:us" ]
2023-08-28T10:28:54+00:00
{"license": "mit", "size_categories": ["n<1K"], "task_categories": ["text-to-image"], "tags": ["art", "not-for-all-audiences"]}
2023-09-17T16:25:44+00:00
[]
[]
TAGS #task_categories-text-to-image #size_categories-n<1K #license-mit #art #not-for-all-audiences #region-us
Dataset of Ithea Myse Valgulious ================================ This is the dataset of Ithea Myse Valgulious, containing 38 images and their tags. Images are crawled from many sites (e.g. danbooru, pixiv, zerochan ...), the auto-crawling system is powered by DeepGHS Team(huggingface organization).
[]
[ "TAGS\n#task_categories-text-to-image #size_categories-n<1K #license-mit #art #not-for-all-audiences #region-us \n" ]
[ 44 ]
[ "passage: TAGS\n#task_categories-text-to-image #size_categories-n<1K #license-mit #art #not-for-all-audiences #region-us \n" ]
4e158e66bacce4080b7564f4bfd047f6b45642df
# Dataset Card for "soict_train_non_value_new" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
quocanh34/soict_train_non_value_new
[ "region:us" ]
2023-08-28T10:29:07+00:00
{"dataset_info": {"features": [{"name": "id", "dtype": "string"}, {"name": "sentence", "dtype": "string"}, {"name": "intent", "dtype": "string"}, {"name": "sentence_annotation", "dtype": "string"}, {"name": "entities", "list": [{"name": "type", "dtype": "string"}, {"name": "filler", "dtype": "string"}]}, {"name": "file", "dtype": "string"}, {"name": "audio", "struct": [{"name": "array", "sequence": "float64"}, {"name": "path", "dtype": "string"}, {"name": "sampling_rate", "dtype": "int64"}]}, {"name": "origin_transcription", "dtype": "string"}, {"name": "sentence_norm", "dtype": "string"}, {"name": "w2v2_base_5grams_transcription", "dtype": "string"}, {"name": "w2v2_large_5grams_transcription", "dtype": "string"}], "splits": [{"name": "train"}], "download_size": 1881, "dataset_size": 0.0}}
2023-08-28T10:29:12+00:00
[]
[]
TAGS #region-us
# Dataset Card for "soict_train_non_value_new" More Information needed
[ "# Dataset Card for \"soict_train_non_value_new\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"soict_train_non_value_new\"\n\nMore Information needed" ]
[ 6, 23 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"soict_train_non_value_new\"\n\nMore Information needed" ]
065b3cbe7a3a23f88f0a31c0c8d7e81362a178c8
# Dataset Card for "autotree_automl_default-of-credit-card-clients_gosdt_l512_d3" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
yzhuang/autotree_automl_default-of-credit-card-clients_gosdt_l512_d3
[ "region:us" ]
2023-08-28T10:55:04+00:00
{"dataset_info": {"features": [{"name": "id", "dtype": "int64"}, {"name": "input_x", "sequence": {"sequence": "float64"}}, {"name": "input_y", "sequence": {"sequence": "float32"}}, {"name": "rtg", "sequence": "float64"}, {"name": "status", "sequence": {"sequence": "float32"}}, {"name": "split_threshold", "sequence": {"sequence": "float64"}}, {"name": "split_dimension", "sequence": "int64"}], "splits": [{"name": "train", "num_bytes": 10863200000, "num_examples": 100000}, {"name": "validation", "num_bytes": 1086320000, "num_examples": 10000}], "download_size": 2035666668, "dataset_size": 11949520000}}
2023-08-28T10:56:32+00:00
[]
[]
TAGS #region-us
# Dataset Card for "autotree_automl_default-of-credit-card-clients_gosdt_l512_d3" More Information needed
[ "# Dataset Card for \"autotree_automl_default-of-credit-card-clients_gosdt_l512_d3\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"autotree_automl_default-of-credit-card-clients_gosdt_l512_d3\"\n\nMore Information needed" ]
[ 6, 36 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"autotree_automl_default-of-credit-card-clients_gosdt_l512_d3\"\n\nMore Information needed" ]
745b4a3d123f65ab0cac34a84a0528f6d2b67d1e
# OpenAssistant TOP-1 Conversation Threads - [Guanacco](https://huggingface.co/datasets/timdettmers/openassistant-guanaco) style export of the best conversation threads from the [open-assistant.io](https://open-assistant.io/) database - exported August 25, 2023 - jsonl files with [chatml](https://github.com/openai/openai-python/blob/main/chatml.md) formatted conversations - train: 12,947 samples / valid: 680 samples
OpenAssistant/oasst_top1_2023-08-25
[ "task_categories:conversational", "size_categories:10K<n<100K", "license:apache-2.0", "region:us" ]
2023-08-28T11:00:02+00:00
{"license": "apache-2.0", "size_categories": ["10K<n<100K"], "task_categories": ["conversational"]}
2023-08-28T11:44:26+00:00
[]
[]
TAGS #task_categories-conversational #size_categories-10K<n<100K #license-apache-2.0 #region-us
# OpenAssistant TOP-1 Conversation Threads - Guanacco style export of the best conversation threads from the URL database - exported August 25, 2023 - jsonl files with chatml formatted conversations - train: 12,947 samples / valid: 680 samples
[ "# OpenAssistant TOP-1 Conversation Threads\n\n- Guanacco style export of the best conversation threads from the URL database\n- exported August 25, 2023\n- jsonl files with chatml formatted conversations\n- train: 12,947 samples / valid: 680 samples" ]
[ "TAGS\n#task_categories-conversational #size_categories-10K<n<100K #license-apache-2.0 #region-us \n", "# OpenAssistant TOP-1 Conversation Threads\n\n- Guanacco style export of the best conversation threads from the URL database\n- exported August 25, 2023\n- jsonl files with chatml formatted conversations\n- train: 12,947 samples / valid: 680 samples" ]
[ 36, 63 ]
[ "passage: TAGS\n#task_categories-conversational #size_categories-10K<n<100K #license-apache-2.0 #region-us \n# OpenAssistant TOP-1 Conversation Threads\n\n- Guanacco style export of the best conversation threads from the URL database\n- exported August 25, 2023\n- jsonl files with chatml formatted conversations\n- train: 12,947 samples / valid: 680 samples" ]
f477f4c91943bf33e067361f73878f7e06bebe1b
Here you can find the solutions generated by of the Code Llama models to the HumanEval and multiPL-E benchmarks used in the Big Code models Leaderboard: https://huggingface.co/spaces/bigcode/bigcode-models-leaderboard.
bigcode/codellama-generations
[ "code", "region:us" ]
2023-08-28T11:45:10+00:00
{"tags": ["code"]}
2023-08-28T12:09:59+00:00
[]
[]
TAGS #code #region-us
Here you can find the solutions generated by of the Code Llama models to the HumanEval and multiPL-E benchmarks used in the Big Code models Leaderboard: URL
[]
[ "TAGS\n#code #region-us \n" ]
[ 8 ]
[ "passage: TAGS\n#code #region-us \n" ]
f08abd1a9de4f7f9bf97b79054d980efb70a50c1
# Kroh: Tonas `dataset_kel.txt`. <br> Tas tehst kroh: <br> `Tehst`→`, ant tehst nymer la.\nTehst ala ton.`
KE-AI/text-gen
[ "task_categories:text-generation", "task_categories:text2text-generation", "task_categories:conversational", "language:kel", "region:us" ]
2023-08-28T11:45:20+00:00
{"language": ["kel"], "task_categories": ["text-generation", "text2text-generation", "conversational"]}
2023-08-28T11:53:17+00:00
[]
[ "kel" ]
TAGS #task_categories-text-generation #task_categories-text2text-generation #task_categories-conversational #language-Kela (Democratic Republic of Congo) #region-us
# Kroh: Tonas 'dataset_kel.txt'. <br> Tas tehst kroh: <br> 'Tehst'→', ant tehst nymer la.\nTehst ala ton.'
[ "# Kroh:\nTonas 'dataset_kel.txt'.\n<br>\nTas tehst kroh:\n<br>\n'Tehst'→', ant tehst nymer la.\\nTehst ala ton.'" ]
[ "TAGS\n#task_categories-text-generation #task_categories-text2text-generation #task_categories-conversational #language-Kela (Democratic Republic of Congo) #region-us \n", "# Kroh:\nTonas 'dataset_kel.txt'.\n<br>\nTas tehst kroh:\n<br>\n'Tehst'→', ant tehst nymer la.\\nTehst ala ton.'" ]
[ 53, 51 ]
[ "passage: TAGS\n#task_categories-text-generation #task_categories-text2text-generation #task_categories-conversational #language-Kela (Democratic Republic of Congo) #region-us \n# Kroh:\nTonas 'dataset_kel.txt'.\n<br>\nTas tehst kroh:\n<br>\n'Tehst'→', ant tehst nymer la.\\nTehst ala ton.'" ]
1ac258953ce201c3b7b5d2c8b755951be53b4291
# Dataset Card for "arrow_dataset" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
iwahith/arrow_dataset
[ "region:us" ]
2023-08-28T12:22:09+00:00
{"dataset_info": {"features": [{"name": "text", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 29324, "num_examples": 13}], "download_size": 14904, "dataset_size": 29324}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]}
2023-08-28T12:22:11+00:00
[]
[]
TAGS #region-us
# Dataset Card for "arrow_dataset" More Information needed
[ "# Dataset Card for \"arrow_dataset\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"arrow_dataset\"\n\nMore Information needed" ]
[ 6, 14 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"arrow_dataset\"\n\nMore Information needed" ]
1bd42398e02167595319caefc531225fd615e9b6
## 请前往 https://huggingface.co/datasets/Besteasy/CG-Eval 下载数据集 ## 评测数据集简介 LucyEval是甲骨易推出的中文大模型全面评测体系。CG-Eval是其中针对中文大模型生成能力的测试基准。 CG-Eval是甲骨易AI研究院与LanguageX AI Lab联合研发的针对中文大模型生成能力的测试基准。在此项测试中,受测的中文大语言模型需要对科技与工程、人文与社会科学、数学计算、医师资格考试、司法考试、注册会计师考试这六个大科目类别下的55个子科目的11000道不同类型问题做出准确且相关的回答。 我们设计了一套复合的打分系统,对于非计算题,每一道名词解释题和简答题都有标准参考答案,采用多个标准打分然后加权求和。对于计算题目,我们会提取最终计算结果和解题过程,然后综合打分。 数据集包括以下字段 大科目类别,子科目名称,题目类型, 题目编号,题目文本,题目答案的汉字长度,题目prompt ## 论文及数据集下载 CG-Eval论文 https://arxiv.org/abs/2308.04823<br> CG-Eval测试数据集下载地址 https://huggingface.co/datasets/Besteasy/CG-Eval<br> CG-Eval自动化评测地址 http://lucyeval.besteasy.com/<br> ## 评测方法 下载数据集后,请使用“题目prompt”列对应的提示词向模型提问,并在csv文件中增加“回答”列,存放模型的回复。请注意题目的回答要与提示词、问题编号、科目名称对应。 在收集到所有回答后,请将csv文件提交到评测网站 http://lucyeval.besteasy.com// 您需要提交的csv文件应具有以下字段: 大科目类别,子科目名称,题目类型, 题目编号,题目文本,题目答案的汉字长度,题目prompt,回答 网站会自动计算分数,您可以选择是否将分数同步到排行榜。 ## Citation If you find the code and testset are useful in your research, please consider citing ``` @misc{zeng2023evaluating, title={Evaluating the Generation Capabilities of Large Chinese Language Models}, author={Hui Zeng and Jingyuan Xue and Meng Hao and Chen Sun and Bin Ning and Na Zhang}, year={2023}, eprint={2308.04823}, archivePrefix={arXiv}, primaryClass={cs.CL} } ``` ## License The lucyeval dataset is licensed under a [Creative Commons Attribution-NonCommercial-ShareAlike 4.0 International License](http://creativecommons.org/licenses/by-nc-sa/4.0/).
Besteasy/lucyeval
[ "task_categories:text-generation", "size_categories:1M<n<10M", "language:zh", "license:cc-by-sa-4.0", "arxiv:2308.04823", "region:us" ]
2023-08-28T12:22:36+00:00
{"language": ["zh"], "license": "cc-by-sa-4.0", "size_categories": ["1M<n<10M"], "task_categories": ["text-generation"], "pretty_name": "lucyeval"}
2023-08-28T12:30:27+00:00
[ "2308.04823" ]
[ "zh" ]
TAGS #task_categories-text-generation #size_categories-1M<n<10M #language-Chinese #license-cc-by-sa-4.0 #arxiv-2308.04823 #region-us
## 请前往 URL 下载数据集 ## 评测数据集简介 LucyEval是甲骨易推出的中文大模型全面评测体系。CG-Eval是其中针对中文大模型生成能力的测试基准。 CG-Eval是甲骨易AI研究院与LanguageX AI Lab联合研发的针对中文大模型生成能力的测试基准。在此项测试中,受测的中文大语言模型需要对科技与工程、人文与社会科学、数学计算、医师资格考试、司法考试、注册会计师考试这六个大科目类别下的55个子科目的11000道不同类型问题做出准确且相关的回答。 我们设计了一套复合的打分系统,对于非计算题,每一道名词解释题和简答题都有标准参考答案,采用多个标准打分然后加权求和。对于计算题目,我们会提取最终计算结果和解题过程,然后综合打分。 数据集包括以下字段 大科目类别,子科目名称,题目类型, 题目编号,题目文本,题目答案的汉字长度,题目prompt ## 论文及数据集下载 CG-Eval论文 URL CG-Eval测试数据集下载地址 URL CG-Eval自动化评测地址 URL ## 评测方法 下载数据集后,请使用“题目prompt”列对应的提示词向模型提问,并在csv文件中增加“回答”列,存放模型的回复。请注意题目的回答要与提示词、问题编号、科目名称对应。 在收集到所有回答后,请将csv文件提交到评测网站 URL/ 您需要提交的csv文件应具有以下字段: 大科目类别,子科目名称,题目类型, 题目编号,题目文本,题目答案的汉字长度,题目prompt,回答 网站会自动计算分数,您可以选择是否将分数同步到排行榜。 If you find the code and testset are useful in your research, please consider citing ## License The lucyeval dataset is licensed under a Creative Commons Attribution-NonCommercial-ShareAlike 4.0 International License.
[ "## 请前往 URL 下载数据集", "## 评测数据集简介\nLucyEval是甲骨易推出的中文大模型全面评测体系。CG-Eval是其中针对中文大模型生成能力的测试基准。\n\nCG-Eval是甲骨易AI研究院与LanguageX AI Lab联合研发的针对中文大模型生成能力的测试基准。在此项测试中,受测的中文大语言模型需要对科技与工程、人文与社会科学、数学计算、医师资格考试、司法考试、注册会计师考试这六个大科目类别下的55个子科目的11000道不同类型问题做出准确且相关的回答。 我们设计了一套复合的打分系统,对于非计算题,每一道名词解释题和简答题都有标准参考答案,采用多个标准打分然后加权求和。对于计算题目,我们会提取最终计算结果和解题过程,然后综合打分。\n\n数据集包括以下字段\n大科目类别,子科目名称,题目类型, 题目编号,题目文本,题目答案的汉字长度,题目prompt", "## 论文及数据集下载\nCG-Eval论文 URL\nCG-Eval测试数据集下载地址 URL\nCG-Eval自动化评测地址 URL", "## 评测方法\n下载数据集后,请使用“题目prompt”列对应的提示词向模型提问,并在csv文件中增加“回答”列,存放模型的回复。请注意题目的回答要与提示词、问题编号、科目名称对应。 在收集到所有回答后,请将csv文件提交到评测网站 \nURL/\n\n您需要提交的csv文件应具有以下字段:\n\n大科目类别,子科目名称,题目类型, 题目编号,题目文本,题目答案的汉字长度,题目prompt,回答\n\n网站会自动计算分数,您可以选择是否将分数同步到排行榜。\n\nIf you find the code and testset are useful in your research, please consider citing", "## License\nThe lucyeval dataset is licensed under a Creative Commons Attribution-NonCommercial-ShareAlike 4.0 International License." ]
[ "TAGS\n#task_categories-text-generation #size_categories-1M<n<10M #language-Chinese #license-cc-by-sa-4.0 #arxiv-2308.04823 #region-us \n", "## 请前往 URL 下载数据集", "## 评测数据集简介\nLucyEval是甲骨易推出的中文大模型全面评测体系。CG-Eval是其中针对中文大模型生成能力的测试基准。\n\nCG-Eval是甲骨易AI研究院与LanguageX AI Lab联合研发的针对中文大模型生成能力的测试基准。在此项测试中,受测的中文大语言模型需要对科技与工程、人文与社会科学、数学计算、医师资格考试、司法考试、注册会计师考试这六个大科目类别下的55个子科目的11000道不同类型问题做出准确且相关的回答。 我们设计了一套复合的打分系统,对于非计算题,每一道名词解释题和简答题都有标准参考答案,采用多个标准打分然后加权求和。对于计算题目,我们会提取最终计算结果和解题过程,然后综合打分。\n\n数据集包括以下字段\n大科目类别,子科目名称,题目类型, 题目编号,题目文本,题目答案的汉字长度,题目prompt", "## 论文及数据集下载\nCG-Eval论文 URL\nCG-Eval测试数据集下载地址 URL\nCG-Eval自动化评测地址 URL", "## 评测方法\n下载数据集后,请使用“题目prompt”列对应的提示词向模型提问,并在csv文件中增加“回答”列,存放模型的回复。请注意题目的回答要与提示词、问题编号、科目名称对应。 在收集到所有回答后,请将csv文件提交到评测网站 \nURL/\n\n您需要提交的csv文件应具有以下字段:\n\n大科目类别,子科目名称,题目类型, 题目编号,题目文本,题目答案的汉字长度,题目prompt,回答\n\n网站会自动计算分数,您可以选择是否将分数同步到排行榜。\n\nIf you find the code and testset are useful in your research, please consider citing", "## License\nThe lucyeval dataset is licensed under a Creative Commons Attribution-NonCommercial-ShareAlike 4.0 International License." ]
[ 54, 9, 233, 35, 165, 28 ]
[ "passage: TAGS\n#task_categories-text-generation #size_categories-1M<n<10M #language-Chinese #license-cc-by-sa-4.0 #arxiv-2308.04823 #region-us \n## 请前往 URL 下载数据集## 评测数据集简介\nLucyEval是甲骨易推出的中文大模型全面评测体系。CG-Eval是其中针对中文大模型生成能力的测试基准。\n\nCG-Eval是甲骨易AI研究院与LanguageX AI Lab联合研发的针对中文大模型生成能力的测试基准。在此项测试中,受测的中文大语言模型需要对科技与工程、人文与社会科学、数学计算、医师资格考试、司法考试、注册会计师考试这六个大科目类别下的55个子科目的11000道不同类型问题做出准确且相关的回答。 我们设计了一套复合的打分系统,对于非计算题,每一道名词解释题和简答题都有标准参考答案,采用多个标准打分然后加权求和。对于计算题目,我们会提取最终计算结果和解题过程,然后综合打分。\n\n数据集包括以下字段\n大科目类别,子科目名称,题目类型, 题目编号,题目文本,题目答案的汉字长度,题目prompt## 论文及数据集下载\nCG-Eval论文 URL\nCG-Eval测试数据集下载地址 URL\nCG-Eval自动化评测地址 URL## 评测方法\n下载数据集后,请使用“题目prompt”列对应的提示词向模型提问,并在csv文件中增加“回答”列,存放模型的回复。请注意题目的回答要与提示词、问题编号、科目名称对应。 在收集到所有回答后,请将csv文件提交到评测网站 \nURL/\n\n您需要提交的csv文件应具有以下字段:\n\n大科目类别,子科目名称,题目类型, 题目编号,题目文本,题目答案的汉字长度,题目prompt,回答\n\n网站会自动计算分数,您可以选择是否将分数同步到排行榜。\n\nIf you find the code and testset are useful in your research, please consider citing" ]
99b2f3d1c0e395fa925c16046d72dee194a8df7f
<h1 align="center"> OVDEval </h1> <h2 align="center"> A Comprehensive Evaluation Benchmark for Open-Vocabulary Detection</h2> <p align="center"> <a href="https://arxiv.org/abs/2308.13177"><strong> [Paper 📄] </strong></a> </p> ## Dataset Description **OVDEval** is a new benchmark for OVD model, which includes 9 sub-tasks and introduces evaluations on commonsense knowledge, attribute understanding, position understanding, object relation comprehension, and more. The dataset is meticulously created to provide hard negatives that challenge models' true understanding of visual and linguistic input. Additionally, we identify a problem with the popular Average Precision (AP) metric when benchmarking models on these fine-grained label datasets and propose a new metric called **Non-Maximum Suppression Average Precision (NMS-AP)** to address this issue. ## Data Details ![image/png](https://cdn-uploads.huggingface.co/production/uploads/658a2e94991d8e7fb24f7688/ngOkek9wJdppyxPB0xZ8Q.png) ## Dataset Structure ```python { "categories": [ { "supercategory": "object", "id": 0, "name": "computer without screen on" }, { "supercategory": "object", "id": 1, "name": "computer with screen on" } ] "annotations": [ { "id": 0, "bbox": [ 111, 117, 99, 75 ], "category_id": 0, "image_id": 0, "iscrowd": 0, "area": 7523 }] "images": [ { "file_name": "64d22c6fe4b011b0db94b993.jpg", "id": 0, "height": 254, "width": 340, "text": [ "computer without screen on" # "text" represents the annotated positive labels of this image. ], "neg_text": [ "computer with screen on" # "neg_text" contains fine-grained hard negative labels which are generated according specific sub-tasks. ] }] } ``` ## How to use it Reference https://github.com/om-ai-lab/OVDEval ## Languages The dataset contains questions in English and code solutions in Python. 
## Citation Information If you find our data, or code helpful, please cite the original paper: ``` @article{yao2023evaluate, title={How to Evaluate the Generalization of Detection? A Benchmark for Comprehensive Open-Vocabulary Detection}, author={Yao, Yiyang and Liu, Peng and Zhao, Tiancheng and Zhang, Qianqian and Liao, Jiajia and Fang, Chunxin and Lee, Kyusong and Wang, Qing}, journal={arXiv preprint arXiv:2308.13177}, year={2023} } ```
omlab/OVDEval
[ "license:openrail", "arxiv:2308.13177", "region:us" ]
2023-08-28T12:33:36+00:00
{"license": "openrail"}
2023-12-26T06:32:15+00:00
[ "2308.13177" ]
[]
TAGS #license-openrail #arxiv-2308.13177 #region-us
<h1 align="center"> OVDEval </h1> <h2 align="center"> A Comprehensive Evaluation Benchmark for Open-Vocabulary Detection</h2> <p align="center"> <a href="URL [Paper ] </strong></a> </p> ## Dataset Description OVDEval is a new benchmark for OVD model, which includes 9 sub-tasks and introduces evaluations on commonsense knowledge, attribute understanding, position understanding, object relation comprehension, and more. The dataset is meticulously created to provide hard negatives that challenge models' true understanding of visual and linguistic input. Additionally, we identify a problem with the popular Average Precision (AP) metric when benchmarking models on these fine-grained label datasets and propose a new metric called Non-Maximum Suppression Average Precision (NMS-AP) to address this issue. ## Data Details !image/png ## Dataset Structure ## How to use it Reference URL ## Languages The dataset contains questions in English and code solutions in Python. If you find our data, or code helpful, please cite the original paper:
[ "## Dataset Description\n\nOVDEval is a new benchmark for OVD model, which includes 9 sub-tasks and introduces evaluations on commonsense knowledge, attribute understanding, position understanding, object relation comprehension, and more. The dataset is meticulously created to provide hard negatives that challenge models' true understanding of visual and linguistic input. Additionally, we identify a problem with the popular Average Precision (AP) metric when benchmarking models on these fine-grained label datasets and propose a new metric called Non-Maximum Suppression Average Precision (NMS-AP) to address this issue.", "## Data Details\n\n!image/png", "## Dataset Structure", "## How to use it\n\nReference URL", "## Languages\n\nThe dataset contains questions in English and code solutions in Python.\n\n\nIf you find our data, or code helpful, please cite the original paper:" ]
[ "TAGS\n#license-openrail #arxiv-2308.13177 #region-us \n", "## Dataset Description\n\nOVDEval is a new benchmark for OVD model, which includes 9 sub-tasks and introduces evaluations on commonsense knowledge, attribute understanding, position understanding, object relation comprehension, and more. The dataset is meticulously created to provide hard negatives that challenge models' true understanding of visual and linguistic input. Additionally, we identify a problem with the popular Average Precision (AP) metric when benchmarking models on these fine-grained label datasets and propose a new metric called Non-Maximum Suppression Average Precision (NMS-AP) to address this issue.", "## Data Details\n\n!image/png", "## Dataset Structure", "## How to use it\n\nReference URL", "## Languages\n\nThe dataset contains questions in English and code solutions in Python.\n\n\nIf you find our data, or code helpful, please cite the original paper:" ]
[ 20, 141, 7, 6, 7, 33 ]
[ "passage: TAGS\n#license-openrail #arxiv-2308.13177 #region-us \n## Dataset Description\n\nOVDEval is a new benchmark for OVD model, which includes 9 sub-tasks and introduces evaluations on commonsense knowledge, attribute understanding, position understanding, object relation comprehension, and more. The dataset is meticulously created to provide hard negatives that challenge models' true understanding of visual and linguistic input. Additionally, we identify a problem with the popular Average Precision (AP) metric when benchmarking models on these fine-grained label datasets and propose a new metric called Non-Maximum Suppression Average Precision (NMS-AP) to address this issue.## Data Details\n\n!image/png## Dataset Structure## How to use it\n\nReference URL## Languages\n\nThe dataset contains questions in English and code solutions in Python.\n\n\nIf you find our data, or code helpful, please cite the original paper:" ]
8179aaa139e2e0398905219afa7eeeba7edd439e
# Dataset Card for "prepare_dataset_test" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
spsither/prepare_dataset_test
[ "region:us" ]
2023-08-28T12:48:00+00:00
{"dataset_info": {"features": [{"name": "input_features", "sequence": {"sequence": "float32"}}, {"name": "labels", "sequence": "int64"}], "splits": [{"name": "test", "num_bytes": 20179682112, "num_examples": 21007}], "download_size": 3254057884, "dataset_size": 20179682112}, "configs": [{"config_name": "default", "data_files": [{"split": "test", "path": "data/test-*"}]}]}
2023-08-28T13:18:13+00:00
[]
[]
TAGS #region-us
# Dataset Card for "prepare_dataset_test" More Information needed
[ "# Dataset Card for \"prepare_dataset_test\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"prepare_dataset_test\"\n\nMore Information needed" ]
[ 6, 17 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"prepare_dataset_test\"\n\nMore Information needed" ]
55f9aaaca08c5ea1185246813ca63251df4b26c5
# Dataset Card for Ginco ### Dataset Summary The Slovene Web genre identification corpus GINCO 1.0 contains 1,002 web texts (478,969 words), manually annotated with genres. The corpus allows for automated genre identification and genre analyses as well as other web corpora research. This dataset was extracted from the manually-annotated subcorpus (GINCO-1.0-suitable.json.zip) from the original GINCO dataset, published on the [CLARIN.SI repository](http://hdl.handle.net/11356/1467). The dataset is split into 602 training, 200 validation, and 200 test texts by the original authors. The texts in the suitable subset are annotated with up to three genre categories, where the primary label is the most prevalent, and secondary and tertiary labels denote presence of additional genre(s). The secondary and tertiary labels are available for multilabel classification, while for most use cases, we suggest that only the primary label is used. The labels are provided in three levels of detail (three category sets), allowing experiments with the full set (24 labels), set of 21 labels (labels with less than 5 instances are merged with label Other) and set of 12 labels (similar labels are merged). For most use cases, we suggest that the smallest set -- set of 12 labels is used (`primary_level_3`), and that the category "Other" is regarded as a "throw-away" category to detect texts for which the classifier could not predict any of the concrete labels. Additionally, the corpus contains some metadata about the text (e.g. url, domain, year) and its paragraphs (e.g. near-duplicates and their usefulness for the genre identification). More details on dataset construction, manual annotation and results of machine learning experiments are provided in the paper ["The GINCO Training Dataset for Web Genre Identification of Documents Out in the Wild"](https://aclanthology.org/2022.lrec-1.170/) (Kuzman et al., 2022). ### Languages Slovenian. 
## Dataset Structure ### Data Instances A sample instance from the dataset: ``` { "id": "3776", "url": "http://www.radiocelje.si/novica.php?id=13007&amp;m=11&amp;l=2010", "crawled": "2014", "hard": false, "paragraphs": [ { "text": "V novembru, mesecu prepre\u010devanja odvisnosti, bodo \u010dlani Lokalne akcijske skupine za prepre\u010devanje zasvojenosti izvedli niz strokovnih predavanj za star\u0161e osnovno\u0161olcev v Celju...", "duplicate": false, "keep": true }, { "text": "Predavanja, ki jih bodo ta mesec organizirali na devetih osnovnih \u0161olah v mestni ob\u010dini Celje, so namenjena star\u0161em u\u010dencev od \u0161estega do devetega razreda. Program predavanj finan\u010dno podpira Mestna ob\u010dina Celje. Osrednja tema predavanj bodo varovalni dejavniki vzgoje, ki lahko pripomorejo k neuporabi drog. Po drogah, dovoljenih in nedovoljenih namre\u010d vse pogosteje posegajo \u017ee otroci. Na predavanjih se bodo star\u0161i seznanili tudi z informacijami o tem, na katere vedenjske in telesne spremembe naj bodo pozorni, kadar sumijo, da je otrok posegel po drogi. \u010ceprav je tema aktualna, saj poleg problemov odvisnosti osvetljuje ve\u0161\u010dine u\u010dinkovitega star\u0161evstva in komuniciranja z otroki v konfliktnih situacijah, se je lani ciklusa predavanj na osmih osnovnih \u0161olah udele\u017eilo le 160 star\u0161ev. Organizatorji tokrat upajo na bolj\u0161i odziv. Niz predavanj bodo izvedli strokovnjaki s podro\u010dja medicine, psihologije, socialnega dela in kriminologije. 
(ba)", "duplicate": false, "keep": true } ], "primary_level_1": "News/Reporting", "primary_level_2": "News/Reporting", "primary_level_3": "News/Reporting", "secondary_level_1": "", "secondary_level_2": "", "secondary_level_3": "", "tertiary_level_1": "", "tertiary_level_2": "", "tertiary_level_3": "", "split": "train", "domain": "www.radiocelje.si" } ``` ### Data Fields - 'id': id of the example; - 'url': exact URL from where the text originates; - 'crawled': the year, when the text has been obtained from the stated URL; - 'hard': whether it was difficult for a human to assign a genre to the text; - 'paragraphs': - 'text': text of the paragraph; - 'duplicate': true if the text is a near-duplicate; - 'keep': true, if the text is useful for the genre identification, false, if not; - 'primary_level_1': first genre category, most detailed category set; - 'primary_level_2': first genre category, category set where too infrequent categories are merged to Other; - 'primary_level_3': first genre category, compact and most useful category set; - 'secondary_level_1': second genre category, most detailed category set; - 'secondary_level_2': second genre category, category set where too infrequent categories are merged to Other; - 'secondary_level_3': second genre category, compact and most useful category set; - 'tertiary_level_1': third genre category, most detailed category set; - 'tertiary_level_2': third genre category, category set where too infrequent categories are merged to Other; - 'tertiary_level_3': third genre category, compact and most useful category set; - 'split': example can belong to the 'train', 'dev', or 'test' split; - 'domain': domain address of the website where the text originates from. 
#### Genre categories Texts are annotated using 24 genre categories: - News/Reporting, - Announcement, - Research Article, - Instruction, - Recipe, - Call (such as a Call for Papers), - Legal/Regulation, - Information/Explanation, - Opinionated News, - Review, - Opinion/Argumentation, - Promotion of a Product, - Promotion of Services, - Invitation, - Promotion, - Interview, - Forum, - Correspondence, - Script/Drama, - Prose, - Lyrical, - FAQ (Frequently Asked Questions), - List of Summaries/Excerpts, - Other. See the [Appendix in the paper](https://aclanthology.org/2022.lrec-1.170/) for descriptions of the labels. ## Additional Information ### Dataset Curators Kuzman, Taja ; Brglez, Mojca ; Rupnik, Peter and Ljubešić, Nikola ### Licensing Information CC BY-SA 4.0 ### Citation Information To cite the dataset: ``` @misc{11356/1467, title = {Slovene Web genre identification corpus {GINCO} 1.0}, author = {Kuzman, Taja and Brglez, Mojca and Rupnik, Peter and Ljube{\v s}i{\'c}, Nikola}, url = {http://hdl.handle.net/11356/1467}, note = {Slovenian language resource repository {CLARIN}.{SI}}, copyright = {Creative Commons - Attribution-{ShareAlike} 4.0 International ({CC} {BY}-{SA} 4.0)}, issn = {2820-4042}, year = {2021} } ``` To cite the paper on the dataset construction and manual annotation: ``` @inproceedings{kuzman2022ginco, title={The GINCO Training Dataset for Web Genre Identification of Documents Out in the Wild}, author={Kuzman, Taja and Rupnik, Peter and Ljube{\v{s}}i{\'c}, Nikola}, booktitle={Proceedings of the Thirteenth Language Resources and Evaluation Conference}, pages={1584--1594}, year={2022} } ``` ### Contributions Thanks to Hana Skitek for adding this dataset, and Taja Kuzman for extending the readme with additional information on the dataset.
cjvt/ginco
[ "task_categories:text-classification", "multilinguality:monolingual", "size_categories:1K<n<10K", "language:sl", "license:cc-by-sa-4.0", "region:us" ]
2023-08-28T13:12:30+00:00
{"language": ["sl"], "license": ["cc-by-sa-4.0"], "multilinguality": ["monolingual"], "size_categories": ["1K<n<10K"], "task_categories": ["text-classification"], "dataset_info": {"features": [{"name": "id", "dtype": "string"}, {"name": "url", "dtype": "string"}, {"name": "crawled", "dtype": "string"}, {"name": "hard", "dtype": "bool"}, {"name": "paragraphs", "list": [{"name": "text", "dtype": "string"}, {"name": "duplicate", "dtype": "bool"}, {"name": "keep", "dtype": "bool"}]}, {"name": "primary_level_1", "dtype": "string"}, {"name": "primary_level_2", "dtype": "string"}, {"name": "primary_level_3", "dtype": "string"}, {"name": "secondary_level_1", "dtype": "string"}, {"name": "secondary_level_2", "dtype": "string"}, {"name": "secondary_level_3", "dtype": "string"}, {"name": "tertiary_level_1", "dtype": "string"}, {"name": "tertiary_level_2", "dtype": "string"}, {"name": "tertiary_level_3", "dtype": "string"}, {"name": "split", "dtype": "string"}, {"name": "domain", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 2250345, "num_examples": 602}, {"name": "validation", "num_bytes": 657986, "num_examples": 200}, {"name": "test", "num_bytes": 550742, "num_examples": 200}], "download_size": 1424443, "dataset_size": 3459073}}
2024-01-29T10:15:53+00:00
[]
[ "sl" ]
TAGS #task_categories-text-classification #multilinguality-monolingual #size_categories-1K<n<10K #language-Slovenian #license-cc-by-sa-4.0 #region-us
# Dataset Card for Ginco ### Dataset Summary The Slovene Web genre identification corpus GINCO 1.0 contains 1,002 web texts (478,969 words), manually annotated with genres. The corpus allows for automated genre identification and genre analyses as well as other web corpora research. This dataset was extracted from the manually-annotated subcorpus (URL) from the original GINCO dataset, published on the CLARIN.SI repository. The dataset is split into 602 training, 200 validation, and 200 test texts by the original authors. The texts in the suitable subset are annotated with up to three genre categories, where the primary label is the most prevalent, and secondary and tertiary labels denote presence of additional genre(s). The secondary and tertiary labels are available for multilabel classification, while for most use cases, we suggest that only the primary label is used. The labels are provided in three levels of detail (three category sets), allowing experiments with the full set (24 labels), set of 21 labels (labels with less than 5 instances are merged with label Other) and set of 12 labels (similar labels are merged). For most use cases, we suggest that the smallest set -- set of 12 labels is used ('primary_level_3'), and that the category "Other" is regarded as a "throw-away" category to detect texts for which the classifier could not predict any of the concrete labels. Additionally, the corpus contains some metadata about the text (e.g. url, domain, year) and its paragraphs (e.g. near-duplicates and their usefulness for the genre identification). More details on dataset construction, manual annotation and results of machine learning experiments are provided in the paper "The GINCO Training Dataset for Web Genre Identification of Documents Out in the Wild" (Kuzman et al., 2022). ### Languages Slovenian. 
## Dataset Structure ### Data Instances A sample instance from the dataset: ### Data Fields - 'id': id of the example; - 'url': exact URL from where the text originates; - 'crawled': the year, when the text has been obtained from the stated URL; - 'hard': whether it was difficult for a human to assign a genre to the text; - 'paragraphs': - 'text': text of the paragraph; - 'duplicate': true if the text is a near-duplicate; - 'keep': true, if the text is useful for the genre identification, false, if not; - 'primary_level_1': first genre category, most detailed category set; - 'primary_level_2': first genre category, category set where too infrequent categories are merged to Other; - 'primary_level_3': first genre category, compact and most useful category set; - 'secondary_level_1': second genre category, most detailed category set; - 'secondary_level_2': second genre category, category set where too infrequent categories are merged to Other; - 'secondary_level_3': second genre category, compact and most useful category set; - 'tertiary_level_1': third genre category, most detailed category set; - 'tertiary_level_2': third genre category, category set where too infrequent categories are merged to Other; - 'tertiary_level_3': third genre category, compact and most useful category set; - 'split': example can belong to the 'train', 'dev', or 'test' split; - 'domain': domain address of the website where the text originates from. #### Genre categories Texts are annotated using 24 genre categories: - News/Reporting, - Announcement, - Research Article, - Instruction, - Recipe, - Call (such as a Call for Papers), - Legal/Regulation, - Information/Explanation, - Opinionated News, - Review, - Opinion/Argumentation, - Promotion of a Product, - Promotion of Services, - Invitation, - Promotion, - Interview, - Forum, - Correspondence, - Script/Drama, - Prose, - Lyrical, - FAQ (Frequently Asked Questions), - List of Summaries/Excerpts, - Other. 
See the Appendix in the paper for descriptions of the labels. ## Additional Information ### Dataset Curators Kuzman, Taja ; Brglez, Mojca ; Rupnik, Peter and Ljubešić, Nikola ### Licensing Information CC BY-SA 4.0 To cite the dataset: To cite the paper on the dataset construction and manual annotation: ### Contributions Thanks to Hana Skitek for adding this dataset, and Taja Kuzman for extending the readme with additional information on the dataset.
[ "# Dataset Card for Ginco", "### Dataset Summary\n\nThe Slovene Web genre identification corpus GINCO 1.0 contains 1,002 web texts (478,969 words), manually annotated with genres.\nThe corpus allows for automated genre identification and genre analyses as well as other web corpora research.\n\nThis dataset was extracted from the manually-annotated subcorpus (URL) from the original GINCO dataset,\npublished on the CLARIN.SI repository.\n\nThe dataset is split into 602 training, 200 validation, and 200 test texts by the original authors.\n\nThe texts in the suitable subset are annotated with up to three genre categories,\nwhere the primary label is the most prevalent, and secondary and tertiary labels denote presence of additional genre(s).\nThe secondary and tertiary labels are available for multilabel classification,\nwhile for most use cases, we suggest that only the primary label is used.\n\nThe labels are provided in three levels of detail (three category sets), allowing experiments with the full set (24 labels),\nset of 21 labels (labels with less than 5 instances are merged with label Other) and set of 12 labels (similar labels are merged).\nFor most use cases, we suggest that the smallest set -- set of 12 labels is used ('primary_level_3'),\nand that the category \"Other\" is regarded as a \"throw-away\" category to detect texts\nfor which the classifier could not predict any of the concrete labels.\n\nAdditionally, the corpus contains some metadata about the text (e.g. url, domain, year)\nand its paragraphs (e.g. 
near-duplicates and their usefulness for the genre identification).\n\nMore details on dataset construction, manual annotation and results of machine learning experiments\nare provided in the paper \"The GINCO Training Dataset for Web Genre Identification of Documents Out in the Wild\" (Kuzman et al., 2022).", "### Languages\n\nSlovenian.", "## Dataset Structure", "### Data Instances\n\nA sample instance from the dataset:", "### Data Fields\n\n- 'id': id of the example;\n- 'url': exact URL from where the text originates;\n- 'crawled': the year, when the text has been obtained from the stated URL;\n- 'hard': whether it was difficult for a human to assign a genre to the text;\n- 'paragraphs':\n - 'text': text of the paragraph;\n - 'duplicate': true if the text is a near-duplicate;\n - 'keep': true, if the text is useful for the genre identification, false, if not;\n- 'primary_level_1': first genre category, most detailed category set;\n- 'primary_level_2': first genre category, category set where too infrequent categories are merged to Other;\n- 'primary_level_3': first genre category, compact and most useful category set;\n- 'secondary_level_1': second genre category, most detailed category set;\n- 'secondary_level_2': second genre category, category set where too infrequent categories are merged to Other;\n- 'secondary_level_3': second genre category, compact and most useful category set;\n- 'tertiary_level_1': third genre category, most detailed category set;\n- 'tertiary_level_2': third genre category, category set where too infrequent categories are merged to Other;\n- 'tertiary_level_3': third genre category, compact and most useful category set;\n- 'split': example can belong to the 'train', 'dev', or 'test' split;\n- 'domain': domain address of the website where the text originates from.", "#### Genre categories\nTexts are annotated using 24 genre categories:\n- News/Reporting,\n- Announcement,\n- Research Article,\n- Instruction,\n- Recipe,\n- Call (such as 
a Call for Papers),\n- Legal/Regulation,\n- Information/Explanation,\n- Opinionated News,\n- Review,\n- Opinion/Argumentation,\n- Promotion of a Product,\n- Promotion of Services,\n- Invitation,\n- Promotion,\n- Interview,\n- Forum,\n- Correspondence,\n- Script/Drama,\n- Prose,\n- Lyrical,\n- FAQ (Frequently Asked Questions),\n- List of Summaries/Excerpts,\n- Other.\n\nSee the Appendix in the paper for descriptions of the labels.", "## Additional Information", "### Dataset Curators\n\nKuzman, Taja ; Brglez, Mojca ; Rupnik, Peter and Ljubešić, Nikola", "### Licensing Information\n\nCC BY-SA 4.0\n\n\n\nTo cite the dataset:\n\n\n\nTo cite the paper on the dataset construction and manual annotation:", "### Contributions\n\nThanks to Hana Skitek for adding this dataset, and Taja Kuzman for extending the readme with additional information on the dataset." ]
[ "TAGS\n#task_categories-text-classification #multilinguality-monolingual #size_categories-1K<n<10K #language-Slovenian #license-cc-by-sa-4.0 #region-us \n", "# Dataset Card for Ginco", "### Dataset Summary\n\nThe Slovene Web genre identification corpus GINCO 1.0 contains 1,002 web texts (478,969 words), manually annotated with genres.\nThe corpus allows for automated genre identification and genre analyses as well as other web corpora research.\n\nThis dataset was extracted from the manually-annotated subcorpus (URL) from the original GINCO dataset,\npublished on the CLARIN.SI repository.\n\nThe dataset is split into 602 training, 200 validation, and 200 test texts by the original authors.\n\nThe texts in the suitable subset are annotated with up to three genre categories,\nwhere the primary label is the most prevalent, and secondary and tertiary labels denote presence of additional genre(s).\nThe secondary and tertiary labels are available for multilabel classification,\nwhile for most use cases, we suggest that only the primary label is used.\n\nThe labels are provided in three levels of detail (three category sets), allowing experiments with the full set (24 labels),\nset of 21 labels (labels with less than 5 instances are merged with label Other) and set of 12 labels (similar labels are merged).\nFor most use cases, we suggest that the smallest set -- set of 12 labels is used ('primary_level_3'),\nand that the category \"Other\" is regarded as a \"throw-away\" category to detect texts\nfor which the classifier could not predict any of the concrete labels.\n\nAdditionally, the corpus contains some metadata about the text (e.g. url, domain, year)\nand its paragraphs (e.g. 
near-duplicates and their usefulness for the genre identification).\n\nMore details on dataset construction, manual annotation and results of machine learning experiments\nare provided in the paper \"The GINCO Training Dataset for Web Genre Identification of Documents Out in the Wild\" (Kuzman et al., 2022).", "### Languages\n\nSlovenian.", "## Dataset Structure", "### Data Instances\n\nA sample instance from the dataset:", "### Data Fields\n\n- 'id': id of the example;\n- 'url': exact URL from where the text originates;\n- 'crawled': the year, when the text has been obtained from the stated URL;\n- 'hard': whether it was difficult for a human to assign a genre to the text;\n- 'paragraphs':\n - 'text': text of the paragraph;\n - 'duplicate': true if the text is a near-duplicate;\n - 'keep': true, if the text is useful for the genre identification, false, if not;\n- 'primary_level_1': first genre category, most detailed category set;\n- 'primary_level_2': first genre category, category set where too infrequent categories are merged to Other;\n- 'primary_level_3': first genre category, compact and most useful category set;\n- 'secondary_level_1': second genre category, most detailed category set;\n- 'secondary_level_2': second genre category, category set where too infrequent categories are merged to Other;\n- 'secondary_level_3': second genre category, compact and most useful category set;\n- 'tertiary_level_1': third genre category, most detailed category set;\n- 'tertiary_level_2': third genre category, category set where too infrequent categories are merged to Other;\n- 'tertiary_level_3': third genre category, compact and most useful category set;\n- 'split': example can belong to the 'train', 'dev', or 'test' split;\n- 'domain': domain address of the website where the text originates from.", "#### Genre categories\nTexts are annotated using 24 genre categories:\n- News/Reporting,\n- Announcement,\n- Research Article,\n- Instruction,\n- Recipe,\n- Call (such as 
a Call for Papers),\n- Legal/Regulation,\n- Information/Explanation,\n- Opinionated News,\n- Review,\n- Opinion/Argumentation,\n- Promotion of a Product,\n- Promotion of Services,\n- Invitation,\n- Promotion,\n- Interview,\n- Forum,\n- Correspondence,\n- Script/Drama,\n- Prose,\n- Lyrical,\n- FAQ (Frequently Asked Questions),\n- List of Summaries/Excerpts,\n- Other.\n\nSee the Appendix in the paper for descriptions of the labels.", "## Additional Information", "### Dataset Curators\n\nKuzman, Taja ; Brglez, Mojca ; Rupnik, Peter and Ljubešić, Nikola", "### Licensing Information\n\nCC BY-SA 4.0\n\n\n\nTo cite the dataset:\n\n\n\nTo cite the paper on the dataset construction and manual annotation:", "### Contributions\n\nThanks to Hana Skitek for adding this dataset, and Taja Kuzman for extending the readme with additional information on the dataset." ]
[ 53, 7, 446, 7, 6, 14, 387, 169, 5, 29, 31, 37 ]
[ "passage: TAGS\n#task_categories-text-classification #multilinguality-monolingual #size_categories-1K<n<10K #language-Slovenian #license-cc-by-sa-4.0 #region-us \n# Dataset Card for Ginco### Dataset Summary\n\nThe Slovene Web genre identification corpus GINCO 1.0 contains 1,002 web texts (478,969 words), manually annotated with genres.\nThe corpus allows for automated genre identification and genre analyses as well as other web corpora research.\n\nThis dataset was extracted from the manually-annotated subcorpus (URL) from the original GINCO dataset,\npublished on the CLARIN.SI repository.\n\nThe dataset is split into 602 training, 200 validation, and 200 test texts by the original authors.\n\nThe texts in the suitable subset are annotated with up to three genre categories,\nwhere the primary label is the most prevalent, and secondary and tertiary labels denote presence of additional genre(s).\nThe secondary and tertiary labels are available for multilabel classification,\nwhile for most use cases, we suggest that only the primary label is used.\n\nThe labels are provided in three levels of detail (three category sets), allowing experiments with the full set (24 labels),\nset of 21 labels (labels with less than 5 instances are merged with label Other) and set of 12 labels (similar labels are merged).\nFor most use cases, we suggest that the smallest set -- set of 12 labels is used ('primary_level_3'),\nand that the category \"Other\" is regarded as a \"throw-away\" category to detect texts\nfor which the classifier could not predict any of the concrete labels.\n\nAdditionally, the corpus contains some metadata about the text (e.g. url, domain, year)\nand its paragraphs (e.g. 
near-duplicates and their usefulness for the genre identification).\n\nMore details on dataset construction, manual annotation and results of machine learning experiments\nare provided in the paper \"The GINCO Training Dataset for Web Genre Identification of Documents Out in the Wild\" (Kuzman et al., 2022).", "passage: ### Languages\n\nSlovenian.## Dataset Structure### Data Instances\n\nA sample instance from the dataset:### Data Fields\n\n- 'id': id of the example;\n- 'url': exact URL from where the text originates;\n- 'crawled': the year, when the text has been obtained from the stated URL;\n- 'hard': whether it was difficult for a human to assign a genre to the text;\n- 'paragraphs':\n - 'text': text of the paragraph;\n - 'duplicate': true if the text is a near-duplicate;\n - 'keep': true, if the text is useful for the genre identification, false, if not;\n- 'primary_level_1': first genre category, most detailed category set;\n- 'primary_level_2': first genre category, category set where too infrequent categories are merged to Other;\n- 'primary_level_3': first genre category, compact and most useful category set;\n- 'secondary_level_1': second genre category, most detailed category set;\n- 'secondary_level_2': second genre category, category set where too infrequent categories are merged to Other;\n- 'secondary_level_3': second genre category, compact and most useful category set;\n- 'tertiary_level_1': third genre category, most detailed category set;\n- 'tertiary_level_2': third genre category, category set where too infrequent categories are merged to Other;\n- 'tertiary_level_3': third genre category, compact and most useful category set;\n- 'split': example can belong to the 'train', 'dev', or 'test' split;\n- 'domain': domain address of the website where the text originates from." ]
2c3099dbb95e50ce7048a93e4aec7066750f3f8c
# Dataset Card for Evaluation run of jondurbin/airoboros-c34b-2.1 ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/jondurbin/airoboros-c34b-2.1 - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [jondurbin/airoboros-c34b-2.1](https://huggingface.co/jondurbin/airoboros-c34b-2.1) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 64 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_jondurbin__airoboros-c34b-2.1", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-10-22T21:16:11.848472](https://huggingface.co/datasets/open-llm-leaderboard/details_jondurbin__airoboros-c34b-2.1/blob/main/results_2023-10-22T21-16-11.848472.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. 
You find each in the results and the "latest" split for each eval): ```python { "all": { "em": 0.2950922818791946, "em_stderr": 0.004670729426706433, "f1": 0.35763003355704864, "f1_stderr": 0.004615741016305116, "acc": 0.38384505972750876, "acc_stderr": 0.010339372510053756 }, "harness|drop|3": { "em": 0.2950922818791946, "em_stderr": 0.004670729426706433, "f1": 0.35763003355704864, "f1_stderr": 0.004615741016305116 }, "harness|gsm8k|5": { "acc": 0.08339651250947688, "acc_stderr": 0.0076156502771067 }, "harness|winogrande|5": { "acc": 0.6842936069455406, "acc_stderr": 0.01306309474300081 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
open-llm-leaderboard/details_jondurbin__airoboros-c34b-2.1
[ "region:us" ]
2023-08-28T13:18:16+00:00
{"pretty_name": "Evaluation run of jondurbin/airoboros-c34b-2.1", "dataset_summary": "Dataset automatically created during the evaluation run of model [jondurbin/airoboros-c34b-2.1](https://huggingface.co/jondurbin/airoboros-c34b-2.1) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_jondurbin__airoboros-c34b-2.1\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-10-22T21:16:11.848472](https://huggingface.co/datasets/open-llm-leaderboard/details_jondurbin__airoboros-c34b-2.1/blob/main/results_2023-10-22T21-16-11.848472.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. 
You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.2950922818791946,\n \"em_stderr\": 0.004670729426706433,\n \"f1\": 0.35763003355704864,\n \"f1_stderr\": 0.004615741016305116,\n \"acc\": 0.38384505972750876,\n \"acc_stderr\": 0.010339372510053756\n },\n \"harness|drop|3\": {\n \"em\": 0.2950922818791946,\n \"em_stderr\": 0.004670729426706433,\n \"f1\": 0.35763003355704864,\n \"f1_stderr\": 0.004615741016305116\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.08339651250947688,\n \"acc_stderr\": 0.0076156502771067\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.6842936069455406,\n \"acc_stderr\": 0.01306309474300081\n }\n}\n```", "repo_url": "https://huggingface.co/jondurbin/airoboros-c34b-2.1", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_08_28T14_17_53.693745", "path": ["**/details_harness|arc:challenge|25_2023-08-28T14:17:53.693745.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-08-28T14:17:53.693745.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_10_22T21_16_11.848472", "path": ["**/details_harness|drop|3_2023-10-22T21-16-11.848472.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-10-22T21-16-11.848472.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_10_22T21_16_11.848472", "path": ["**/details_harness|gsm8k|5_2023-10-22T21-16-11.848472.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-10-22T21-16-11.848472.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_08_28T14_17_53.693745", "path": ["**/details_harness|hellaswag|10_2023-08-28T14:17:53.693745.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-08-28T14:17:53.693745.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_08_28T14_17_53.693745", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-28T14:17:53.693745.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-28T14:17:53.693745.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-28T14:17:53.693745.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-28T14:17:53.693745.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-28T14:17:53.693745.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-28T14:17:53.693745.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-28T14:17:53.693745.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-28T14:17:53.693745.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-28T14:17:53.693745.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-28T14:17:53.693745.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-28T14:17:53.693745.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-28T14:17:53.693745.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-28T14:17:53.693745.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-28T14:17:53.693745.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-28T14:17:53.693745.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-28T14:17:53.693745.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-28T14:17:53.693745.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-28T14:17:53.693745.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-08-28T14:17:53.693745.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-28T14:17:53.693745.parquet", 
"**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-28T14:17:53.693745.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-28T14:17:53.693745.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-28T14:17:53.693745.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-28T14:17:53.693745.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-28T14:17:53.693745.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-28T14:17:53.693745.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-28T14:17:53.693745.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-28T14:17:53.693745.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-28T14:17:53.693745.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-28T14:17:53.693745.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-28T14:17:53.693745.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-28T14:17:53.693745.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-28T14:17:53.693745.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-28T14:17:53.693745.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-28T14:17:53.693745.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-28T14:17:53.693745.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-28T14:17:53.693745.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-08-28T14:17:53.693745.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-28T14:17:53.693745.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-28T14:17:53.693745.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-28T14:17:53.693745.parquet", 
"**/details_harness|hendrycksTest-miscellaneous|5_2023-08-28T14:17:53.693745.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-28T14:17:53.693745.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-28T14:17:53.693745.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-28T14:17:53.693745.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-28T14:17:53.693745.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-28T14:17:53.693745.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-28T14:17:53.693745.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-28T14:17:53.693745.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-28T14:17:53.693745.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-28T14:17:53.693745.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-28T14:17:53.693745.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-28T14:17:53.693745.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-28T14:17:53.693745.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-28T14:17:53.693745.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-28T14:17:53.693745.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-08-28T14:17:53.693745.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-28T14:17:53.693745.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-28T14:17:53.693745.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-28T14:17:53.693745.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-28T14:17:53.693745.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-28T14:17:53.693745.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-28T14:17:53.693745.parquet", 
"**/details_harness|hendrycksTest-college_chemistry|5_2023-08-28T14:17:53.693745.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-28T14:17:53.693745.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-28T14:17:53.693745.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-28T14:17:53.693745.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-28T14:17:53.693745.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-28T14:17:53.693745.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-28T14:17:53.693745.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-28T14:17:53.693745.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-28T14:17:53.693745.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-28T14:17:53.693745.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-28T14:17:53.693745.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-28T14:17:53.693745.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-08-28T14:17:53.693745.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-28T14:17:53.693745.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-28T14:17:53.693745.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-28T14:17:53.693745.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-28T14:17:53.693745.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-28T14:17:53.693745.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-28T14:17:53.693745.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-28T14:17:53.693745.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-28T14:17:53.693745.parquet", 
"**/details_harness|hendrycksTest-high_school_physics|5_2023-08-28T14:17:53.693745.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-28T14:17:53.693745.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-28T14:17:53.693745.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-28T14:17:53.693745.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-28T14:17:53.693745.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-28T14:17:53.693745.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-28T14:17:53.693745.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-28T14:17:53.693745.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-28T14:17:53.693745.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-28T14:17:53.693745.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-08-28T14:17:53.693745.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-28T14:17:53.693745.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-28T14:17:53.693745.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-28T14:17:53.693745.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-28T14:17:53.693745.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-28T14:17:53.693745.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-28T14:17:53.693745.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-28T14:17:53.693745.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-28T14:17:53.693745.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-28T14:17:53.693745.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-28T14:17:53.693745.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-28T14:17:53.693745.parquet", 
"**/details_harness|hendrycksTest-professional_medicine|5_2023-08-28T14:17:53.693745.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-28T14:17:53.693745.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-28T14:17:53.693745.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-28T14:17:53.693745.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-28T14:17:53.693745.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-28T14:17:53.693745.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-28T14:17:53.693745.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-08-28T14:17:53.693745.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_08_28T14_17_53.693745", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-28T14:17:53.693745.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-28T14:17:53.693745.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_08_28T14_17_53.693745", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-28T14:17:53.693745.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-28T14:17:53.693745.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_08_28T14_17_53.693745", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-28T14:17:53.693745.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-28T14:17:53.693745.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_08_28T14_17_53.693745", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-08-28T14:17:53.693745.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-business_ethics|5_2023-08-28T14:17:53.693745.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_08_28T14_17_53.693745", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-28T14:17:53.693745.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-28T14:17:53.693745.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_08_28T14_17_53.693745", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-28T14:17:53.693745.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-28T14:17:53.693745.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_08_28T14_17_53.693745", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-28T14:17:53.693745.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-28T14:17:53.693745.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_08_28T14_17_53.693745", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-28T14:17:53.693745.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-28T14:17:53.693745.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_08_28T14_17_53.693745", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-28T14:17:53.693745.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-28T14:17:53.693745.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_08_28T14_17_53.693745", "path": 
["**/details_harness|hendrycksTest-college_medicine|5_2023-08-28T14:17:53.693745.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-08-28T14:17:53.693745.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_08_28T14_17_53.693745", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-08-28T14:17:53.693745.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-08-28T14:17:53.693745.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_08_28T14_17_53.693745", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-28T14:17:53.693745.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-28T14:17:53.693745.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_08_28T14_17_53.693745", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-28T14:17:53.693745.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-28T14:17:53.693745.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_08_28T14_17_53.693745", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-28T14:17:53.693745.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-28T14:17:53.693745.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_08_28T14_17_53.693745", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-28T14:17:53.693745.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-28T14:17:53.693745.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", 
"data_files": [{"split": "2023_08_28T14_17_53.693745", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-28T14:17:53.693745.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-28T14:17:53.693745.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_08_28T14_17_53.693745", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-28T14:17:53.693745.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-28T14:17:53.693745.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_08_28T14_17_53.693745", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-28T14:17:53.693745.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-28T14:17:53.693745.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_08_28T14_17_53.693745", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-28T14:17:53.693745.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-28T14:17:53.693745.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_08_28T14_17_53.693745", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-28T14:17:53.693745.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-28T14:17:53.693745.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_08_28T14_17_53.693745", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-28T14:17:53.693745.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-28T14:17:53.693745.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_08_28T14_17_53.693745", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-28T14:17:53.693745.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-28T14:17:53.693745.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_08_28T14_17_53.693745", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-28T14:17:53.693745.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-28T14:17:53.693745.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_08_28T14_17_53.693745", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-28T14:17:53.693745.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-28T14:17:53.693745.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_08_28T14_17_53.693745", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-28T14:17:53.693745.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-28T14:17:53.693745.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_08_28T14_17_53.693745", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-28T14:17:53.693745.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-28T14:17:53.693745.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_08_28T14_17_53.693745", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-28T14:17:53.693745.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-28T14:17:53.693745.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_08_28T14_17_53.693745", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-28T14:17:53.693745.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-28T14:17:53.693745.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_08_28T14_17_53.693745", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-28T14:17:53.693745.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-28T14:17:53.693745.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_08_28T14_17_53.693745", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-28T14:17:53.693745.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-28T14:17:53.693745.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_08_28T14_17_53.693745", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-28T14:17:53.693745.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-28T14:17:53.693745.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_08_28T14_17_53.693745", "path": 
["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-28T14:17:53.693745.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-28T14:17:53.693745.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_08_28T14_17_53.693745", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-28T14:17:53.693745.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-28T14:17:53.693745.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_08_28T14_17_53.693745", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-28T14:17:53.693745.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-28T14:17:53.693745.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_08_28T14_17_53.693745", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-28T14:17:53.693745.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-28T14:17:53.693745.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_08_28T14_17_53.693745", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-28T14:17:53.693745.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-28T14:17:53.693745.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_08_28T14_17_53.693745", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-28T14:17:53.693745.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-28T14:17:53.693745.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": 
"2023_08_28T14_17_53.693745", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-28T14:17:53.693745.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-28T14:17:53.693745.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_08_28T14_17_53.693745", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-28T14:17:53.693745.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-28T14:17:53.693745.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_08_28T14_17_53.693745", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-28T14:17:53.693745.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-28T14:17:53.693745.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_08_28T14_17_53.693745", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-28T14:17:53.693745.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-28T14:17:53.693745.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_08_28T14_17_53.693745", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-28T14:17:53.693745.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-28T14:17:53.693745.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_08_28T14_17_53.693745", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-28T14:17:53.693745.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-28T14:17:53.693745.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": 
"2023_08_28T14_17_53.693745", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-28T14:17:53.693745.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-28T14:17:53.693745.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_08_28T14_17_53.693745", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-28T14:17:53.693745.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-28T14:17:53.693745.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_08_28T14_17_53.693745", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-28T14:17:53.693745.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-28T14:17:53.693745.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_08_28T14_17_53.693745", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-28T14:17:53.693745.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-28T14:17:53.693745.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_08_28T14_17_53.693745", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-28T14:17:53.693745.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-28T14:17:53.693745.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_08_28T14_17_53.693745", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-28T14:17:53.693745.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-28T14:17:53.693745.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": 
[{"split": "2023_08_28T14_17_53.693745", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-28T14:17:53.693745.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-28T14:17:53.693745.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_08_28T14_17_53.693745", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-28T14:17:53.693745.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-28T14:17:53.693745.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_08_28T14_17_53.693745", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-28T14:17:53.693745.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-28T14:17:53.693745.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_08_28T14_17_53.693745", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-28T14:17:53.693745.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-28T14:17:53.693745.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_08_28T14_17_53.693745", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-28T14:17:53.693745.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-28T14:17:53.693745.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_08_28T14_17_53.693745", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-28T14:17:53.693745.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-28T14:17:53.693745.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_08_28T14_17_53.693745", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-28T14:17:53.693745.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-28T14:17:53.693745.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_08_28T14_17_53.693745", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-28T14:17:53.693745.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-28T14:17:53.693745.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_08_28T14_17_53.693745", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-28T14:17:53.693745.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-28T14:17:53.693745.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_10_22T21_16_11.848472", "path": ["**/details_harness|winogrande|5_2023-10-22T21-16-11.848472.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-10-22T21-16-11.848472.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_08_28T14_17_53.693745", "path": ["results_2023-08-28T14:17:53.693745.parquet"]}, {"split": "2023_10_22T21_16_11.848472", "path": ["results_2023-10-22T21-16-11.848472.parquet"]}, {"split": "latest", "path": ["results_2023-10-22T21-16-11.848472.parquet"]}]}]}
2023-10-22T20:16:24+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of jondurbin/airoboros-c34b-2.1 ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model jondurbin/airoboros-c34b-2.1 on the Open LLM Leaderboard. The dataset is composed of 64 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-10-22T21:16:11.848472(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
[ "# Dataset Card for Evaluation run of jondurbin/airoboros-c34b-2.1", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model jondurbin/airoboros-c34b-2.1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-22T21:16:11.848472(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of jondurbin/airoboros-c34b-2.1", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model jondurbin/airoboros-c34b-2.1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-22T21:16:11.848472(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 22, 31, 170, 66, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of jondurbin/airoboros-c34b-2.1## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model jondurbin/airoboros-c34b-2.1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-10-22T21:16:11.848472(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
eb6a395d9bbbb78a53bba7fcfdb6c63958e428f9
# Dataset Card for Evaluation run of WizardLM/WizardCoder-Python-34B-V1.0 ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/WizardLM/WizardCoder-Python-34B-V1.0 - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [WizardLM/WizardCoder-Python-34B-V1.0](https://huggingface.co/WizardLM/WizardCoder-Python-34B-V1.0) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 64 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 3 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_WizardLM__WizardCoder-Python-34B-V1.0", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-09-30T13:26:26.501307](https://huggingface.co/datasets/open-llm-leaderboard/details_WizardLM__WizardCoder-Python-34B-V1.0/blob/main/results_2023-09-30T13-26-26.501307.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. 
You find each in the results and the "latest" split for each eval): ```python { "all": { "em": 0.19379194630872484, "em_stderr": 0.004047912159759954, "f1": 0.2506229026845643, "f1_stderr": 0.0041031622757888245, "acc": 0.38913655258910956, "acc_stderr": 0.010569829944033455 }, "harness|drop|3": { "em": 0.19379194630872484, "em_stderr": 0.004047912159759954, "f1": 0.2506229026845643, "f1_stderr": 0.0041031622757888245 }, "harness|gsm8k|5": { "acc": 0.09476876421531463, "acc_stderr": 0.008067791560015424 }, "harness|winogrande|5": { "acc": 0.6835043409629045, "acc_stderr": 0.013071868328051487 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
open-llm-leaderboard/details_WizardLM__WizardCoder-Python-34B-V1.0
[ "region:us" ]
2023-08-28T13:25:10+00:00
{"pretty_name": "Evaluation run of WizardLM/WizardCoder-Python-34B-V1.0", "dataset_summary": "Dataset automatically created during the evaluation run of model [WizardLM/WizardCoder-Python-34B-V1.0](https://huggingface.co/WizardLM/WizardCoder-Python-34B-V1.0) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 3 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_WizardLM__WizardCoder-Python-34B-V1.0\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-09-30T13:26:26.501307](https://huggingface.co/datasets/open-llm-leaderboard/details_WizardLM__WizardCoder-Python-34B-V1.0/blob/main/results_2023-09-30T13-26-26.501307.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. 
You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.19379194630872484,\n \"em_stderr\": 0.004047912159759954,\n \"f1\": 0.2506229026845643,\n \"f1_stderr\": 0.0041031622757888245,\n \"acc\": 0.38913655258910956,\n \"acc_stderr\": 0.010569829944033455\n },\n \"harness|drop|3\": {\n \"em\": 0.19379194630872484,\n \"em_stderr\": 0.004047912159759954,\n \"f1\": 0.2506229026845643,\n \"f1_stderr\": 0.0041031622757888245\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.09476876421531463,\n \"acc_stderr\": 0.008067791560015424\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.6835043409629045,\n \"acc_stderr\": 0.013071868328051487\n }\n}\n```", "repo_url": "https://huggingface.co/WizardLM/WizardCoder-Python-34B-V1.0", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_08_28T14_24_48.520314", "path": ["**/details_harness|arc:challenge|25_2023-08-28T14:24:48.520314.parquet"]}, {"split": "2023_08_30T15_50_41.710615", "path": ["**/details_harness|arc:challenge|25_2023-08-30T15:50:41.710615.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-08-30T15:50:41.710615.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_09_30T13_26_26.501307", "path": ["**/details_harness|drop|3_2023-09-30T13-26-26.501307.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-09-30T13-26-26.501307.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_09_30T13_26_26.501307", "path": ["**/details_harness|gsm8k|5_2023-09-30T13-26-26.501307.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-09-30T13-26-26.501307.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_08_28T14_24_48.520314", "path": 
["**/details_harness|hellaswag|10_2023-08-28T14:24:48.520314.parquet"]}, {"split": "2023_08_30T15_50_41.710615", "path": ["**/details_harness|hellaswag|10_2023-08-30T15:50:41.710615.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-08-30T15:50:41.710615.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_08_28T14_24_48.520314", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-28T14:24:48.520314.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-28T14:24:48.520314.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-28T14:24:48.520314.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-28T14:24:48.520314.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-28T14:24:48.520314.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-28T14:24:48.520314.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-28T14:24:48.520314.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-28T14:24:48.520314.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-28T14:24:48.520314.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-28T14:24:48.520314.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-28T14:24:48.520314.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-28T14:24:48.520314.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-28T14:24:48.520314.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-28T14:24:48.520314.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-28T14:24:48.520314.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-28T14:24:48.520314.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-28T14:24:48.520314.parquet", 
"**/details_harness|hendrycksTest-global_facts|5_2023-08-28T14:24:48.520314.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-08-28T14:24:48.520314.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-28T14:24:48.520314.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-28T14:24:48.520314.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-28T14:24:48.520314.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-28T14:24:48.520314.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-28T14:24:48.520314.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-28T14:24:48.520314.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-28T14:24:48.520314.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-28T14:24:48.520314.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-28T14:24:48.520314.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-28T14:24:48.520314.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-28T14:24:48.520314.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-28T14:24:48.520314.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-28T14:24:48.520314.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-28T14:24:48.520314.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-28T14:24:48.520314.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-28T14:24:48.520314.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-28T14:24:48.520314.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-28T14:24:48.520314.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-08-28T14:24:48.520314.parquet", 
"**/details_harness|hendrycksTest-management|5_2023-08-28T14:24:48.520314.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-28T14:24:48.520314.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-28T14:24:48.520314.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-28T14:24:48.520314.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-28T14:24:48.520314.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-28T14:24:48.520314.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-28T14:24:48.520314.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-28T14:24:48.520314.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-28T14:24:48.520314.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-28T14:24:48.520314.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-28T14:24:48.520314.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-28T14:24:48.520314.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-28T14:24:48.520314.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-28T14:24:48.520314.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-28T14:24:48.520314.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-28T14:24:48.520314.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-28T14:24:48.520314.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-28T14:24:48.520314.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-08-28T14:24:48.520314.parquet"]}, {"split": "2023_08_30T15_50_41.710615", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-30T15:50:41.710615.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-30T15:50:41.710615.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-30T15:50:41.710615.parquet", 
"**/details_harness|hendrycksTest-business_ethics|5_2023-08-30T15:50:41.710615.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-30T15:50:41.710615.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-30T15:50:41.710615.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-30T15:50:41.710615.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-30T15:50:41.710615.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-30T15:50:41.710615.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-30T15:50:41.710615.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-30T15:50:41.710615.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-30T15:50:41.710615.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-30T15:50:41.710615.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-30T15:50:41.710615.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-30T15:50:41.710615.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-30T15:50:41.710615.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-30T15:50:41.710615.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-30T15:50:41.710615.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-08-30T15:50:41.710615.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-30T15:50:41.710615.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-30T15:50:41.710615.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-30T15:50:41.710615.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-30T15:50:41.710615.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-30T15:50:41.710615.parquet", 
"**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-30T15:50:41.710615.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-30T15:50:41.710615.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-30T15:50:41.710615.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-30T15:50:41.710615.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-30T15:50:41.710615.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-30T15:50:41.710615.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-30T15:50:41.710615.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-30T15:50:41.710615.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-30T15:50:41.710615.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-30T15:50:41.710615.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-30T15:50:41.710615.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-30T15:50:41.710615.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-30T15:50:41.710615.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-08-30T15:50:41.710615.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-30T15:50:41.710615.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-30T15:50:41.710615.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-30T15:50:41.710615.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-30T15:50:41.710615.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-30T15:50:41.710615.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-30T15:50:41.710615.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-30T15:50:41.710615.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-30T15:50:41.710615.parquet", 
"**/details_harness|hendrycksTest-prehistory|5_2023-08-30T15:50:41.710615.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-30T15:50:41.710615.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-30T15:50:41.710615.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-30T15:50:41.710615.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-30T15:50:41.710615.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-30T15:50:41.710615.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-30T15:50:41.710615.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-30T15:50:41.710615.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-30T15:50:41.710615.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-30T15:50:41.710615.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-08-30T15:50:41.710615.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-30T15:50:41.710615.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-30T15:50:41.710615.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-30T15:50:41.710615.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-30T15:50:41.710615.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-30T15:50:41.710615.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-30T15:50:41.710615.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-30T15:50:41.710615.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-30T15:50:41.710615.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-30T15:50:41.710615.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-30T15:50:41.710615.parquet", 
"**/details_harness|hendrycksTest-college_physics|5_2023-08-30T15:50:41.710615.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-30T15:50:41.710615.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-30T15:50:41.710615.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-30T15:50:41.710615.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-30T15:50:41.710615.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-30T15:50:41.710615.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-30T15:50:41.710615.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-30T15:50:41.710615.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-08-30T15:50:41.710615.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-30T15:50:41.710615.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-30T15:50:41.710615.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-30T15:50:41.710615.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-30T15:50:41.710615.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-30T15:50:41.710615.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-30T15:50:41.710615.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-30T15:50:41.710615.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-30T15:50:41.710615.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-30T15:50:41.710615.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-30T15:50:41.710615.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-30T15:50:41.710615.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-30T15:50:41.710615.parquet", 
"**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-30T15:50:41.710615.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-30T15:50:41.710615.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-30T15:50:41.710615.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-30T15:50:41.710615.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-30T15:50:41.710615.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-30T15:50:41.710615.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-08-30T15:50:41.710615.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-30T15:50:41.710615.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-30T15:50:41.710615.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-30T15:50:41.710615.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-30T15:50:41.710615.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-30T15:50:41.710615.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-30T15:50:41.710615.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-30T15:50:41.710615.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-30T15:50:41.710615.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-30T15:50:41.710615.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-30T15:50:41.710615.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-30T15:50:41.710615.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-30T15:50:41.710615.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-30T15:50:41.710615.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-30T15:50:41.710615.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-30T15:50:41.710615.parquet", 
"**/details_harness|hendrycksTest-sociology|5_2023-08-30T15:50:41.710615.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-30T15:50:41.710615.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-30T15:50:41.710615.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-08-30T15:50:41.710615.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_08_28T14_24_48.520314", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-28T14:24:48.520314.parquet"]}, {"split": "2023_08_30T15_50_41.710615", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-30T15:50:41.710615.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-30T15:50:41.710615.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_08_28T14_24_48.520314", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-28T14:24:48.520314.parquet"]}, {"split": "2023_08_30T15_50_41.710615", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-30T15:50:41.710615.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-30T15:50:41.710615.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_08_28T14_24_48.520314", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-28T14:24:48.520314.parquet"]}, {"split": "2023_08_30T15_50_41.710615", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-30T15:50:41.710615.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-30T15:50:41.710615.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_08_28T14_24_48.520314", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-08-28T14:24:48.520314.parquet"]}, {"split": "2023_08_30T15_50_41.710615", "path": 
["**/details_harness|hendrycksTest-business_ethics|5_2023-08-30T15:50:41.710615.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-08-30T15:50:41.710615.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_08_28T14_24_48.520314", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-28T14:24:48.520314.parquet"]}, {"split": "2023_08_30T15_50_41.710615", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-30T15:50:41.710615.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-30T15:50:41.710615.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_08_28T14_24_48.520314", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-28T14:24:48.520314.parquet"]}, {"split": "2023_08_30T15_50_41.710615", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-30T15:50:41.710615.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-30T15:50:41.710615.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_08_28T14_24_48.520314", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-28T14:24:48.520314.parquet"]}, {"split": "2023_08_30T15_50_41.710615", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-30T15:50:41.710615.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-30T15:50:41.710615.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_08_28T14_24_48.520314", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-28T14:24:48.520314.parquet"]}, {"split": "2023_08_30T15_50_41.710615", "path": 
["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-30T15:50:41.710615.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-30T15:50:41.710615.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_08_28T14_24_48.520314", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-28T14:24:48.520314.parquet"]}, {"split": "2023_08_30T15_50_41.710615", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-30T15:50:41.710615.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-30T15:50:41.710615.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_08_28T14_24_48.520314", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-08-28T14:24:48.520314.parquet"]}, {"split": "2023_08_30T15_50_41.710615", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-08-30T15:50:41.710615.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-08-30T15:50:41.710615.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_08_28T14_24_48.520314", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-08-28T14:24:48.520314.parquet"]}, {"split": "2023_08_30T15_50_41.710615", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-08-30T15:50:41.710615.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-08-30T15:50:41.710615.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_08_28T14_24_48.520314", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-28T14:24:48.520314.parquet"]}, {"split": "2023_08_30T15_50_41.710615", "path": 
["**/details_harness|hendrycksTest-computer_security|5_2023-08-30T15:50:41.710615.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-30T15:50:41.710615.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_08_28T14_24_48.520314", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-28T14:24:48.520314.parquet"]}, {"split": "2023_08_30T15_50_41.710615", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-30T15:50:41.710615.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-30T15:50:41.710615.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_08_28T14_24_48.520314", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-28T14:24:48.520314.parquet"]}, {"split": "2023_08_30T15_50_41.710615", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-30T15:50:41.710615.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-30T15:50:41.710615.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_08_28T14_24_48.520314", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-28T14:24:48.520314.parquet"]}, {"split": "2023_08_30T15_50_41.710615", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-30T15:50:41.710615.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-30T15:50:41.710615.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_08_28T14_24_48.520314", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-28T14:24:48.520314.parquet"]}, {"split": "2023_08_30T15_50_41.710615", "path": 
["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-30T15:50:41.710615.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-30T15:50:41.710615.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_08_28T14_24_48.520314", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-28T14:24:48.520314.parquet"]}, {"split": "2023_08_30T15_50_41.710615", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-30T15:50:41.710615.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-30T15:50:41.710615.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_08_28T14_24_48.520314", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-28T14:24:48.520314.parquet"]}, {"split": "2023_08_30T15_50_41.710615", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-30T15:50:41.710615.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-30T15:50:41.710615.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_08_28T14_24_48.520314", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-28T14:24:48.520314.parquet"]}, {"split": "2023_08_30T15_50_41.710615", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-30T15:50:41.710615.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-30T15:50:41.710615.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_08_28T14_24_48.520314", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-28T14:24:48.520314.parquet"]}, {"split": "2023_08_30T15_50_41.710615", "path": 
["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-30T15:50:41.710615.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-30T15:50:41.710615.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_08_28T14_24_48.520314", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-28T14:24:48.520314.parquet"]}, {"split": "2023_08_30T15_50_41.710615", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-30T15:50:41.710615.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-30T15:50:41.710615.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_08_28T14_24_48.520314", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-28T14:24:48.520314.parquet"]}, {"split": "2023_08_30T15_50_41.710615", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-30T15:50:41.710615.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-30T15:50:41.710615.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_08_28T14_24_48.520314", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-28T14:24:48.520314.parquet"]}, {"split": "2023_08_30T15_50_41.710615", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-30T15:50:41.710615.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-30T15:50:41.710615.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_08_28T14_24_48.520314", "path": 
["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-28T14:24:48.520314.parquet"]}, {"split": "2023_08_30T15_50_41.710615", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-30T15:50:41.710615.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-30T15:50:41.710615.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_08_28T14_24_48.520314", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-28T14:24:48.520314.parquet"]}, {"split": "2023_08_30T15_50_41.710615", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-30T15:50:41.710615.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-30T15:50:41.710615.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_08_28T14_24_48.520314", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-28T14:24:48.520314.parquet"]}, {"split": "2023_08_30T15_50_41.710615", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-30T15:50:41.710615.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-30T15:50:41.710615.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_08_28T14_24_48.520314", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-28T14:24:48.520314.parquet"]}, {"split": "2023_08_30T15_50_41.710615", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-30T15:50:41.710615.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-30T15:50:41.710615.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_08_28T14_24_48.520314", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-28T14:24:48.520314.parquet"]}, {"split": "2023_08_30T15_50_41.710615", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-30T15:50:41.710615.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-30T15:50:41.710615.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_08_28T14_24_48.520314", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-28T14:24:48.520314.parquet"]}, {"split": "2023_08_30T15_50_41.710615", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-30T15:50:41.710615.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-30T15:50:41.710615.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_08_28T14_24_48.520314", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-28T14:24:48.520314.parquet"]}, {"split": "2023_08_30T15_50_41.710615", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-30T15:50:41.710615.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-30T15:50:41.710615.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_08_28T14_24_48.520314", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-28T14:24:48.520314.parquet"]}, {"split": "2023_08_30T15_50_41.710615", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-30T15:50:41.710615.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-30T15:50:41.710615.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_08_28T14_24_48.520314", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-28T14:24:48.520314.parquet"]}, {"split": "2023_08_30T15_50_41.710615", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-30T15:50:41.710615.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-30T15:50:41.710615.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_08_28T14_24_48.520314", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-28T14:24:48.520314.parquet"]}, {"split": "2023_08_30T15_50_41.710615", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-30T15:50:41.710615.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-30T15:50:41.710615.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_08_28T14_24_48.520314", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-28T14:24:48.520314.parquet"]}, {"split": "2023_08_30T15_50_41.710615", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-30T15:50:41.710615.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-30T15:50:41.710615.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_08_28T14_24_48.520314", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-28T14:24:48.520314.parquet"]}, {"split": "2023_08_30T15_50_41.710615", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-30T15:50:41.710615.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-30T15:50:41.710615.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", 
"data_files": [{"split": "2023_08_28T14_24_48.520314", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-28T14:24:48.520314.parquet"]}, {"split": "2023_08_30T15_50_41.710615", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-30T15:50:41.710615.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-30T15:50:41.710615.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_08_28T14_24_48.520314", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-28T14:24:48.520314.parquet"]}, {"split": "2023_08_30T15_50_41.710615", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-30T15:50:41.710615.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-30T15:50:41.710615.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_08_28T14_24_48.520314", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-28T14:24:48.520314.parquet"]}, {"split": "2023_08_30T15_50_41.710615", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-30T15:50:41.710615.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-30T15:50:41.710615.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_08_28T14_24_48.520314", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-28T14:24:48.520314.parquet"]}, {"split": "2023_08_30T15_50_41.710615", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-30T15:50:41.710615.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-30T15:50:41.710615.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_08_28T14_24_48.520314", "path": 
["**/details_harness|hendrycksTest-marketing|5_2023-08-28T14:24:48.520314.parquet"]}, {"split": "2023_08_30T15_50_41.710615", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-30T15:50:41.710615.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-30T15:50:41.710615.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_08_28T14_24_48.520314", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-28T14:24:48.520314.parquet"]}, {"split": "2023_08_30T15_50_41.710615", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-30T15:50:41.710615.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-30T15:50:41.710615.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_08_28T14_24_48.520314", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-28T14:24:48.520314.parquet"]}, {"split": "2023_08_30T15_50_41.710615", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-30T15:50:41.710615.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-30T15:50:41.710615.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_08_28T14_24_48.520314", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-28T14:24:48.520314.parquet"]}, {"split": "2023_08_30T15_50_41.710615", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-30T15:50:41.710615.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-30T15:50:41.710615.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_08_28T14_24_48.520314", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-28T14:24:48.520314.parquet"]}, {"split": 
"2023_08_30T15_50_41.710615", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-30T15:50:41.710615.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-30T15:50:41.710615.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_08_28T14_24_48.520314", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-28T14:24:48.520314.parquet"]}, {"split": "2023_08_30T15_50_41.710615", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-30T15:50:41.710615.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-30T15:50:41.710615.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_08_28T14_24_48.520314", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-28T14:24:48.520314.parquet"]}, {"split": "2023_08_30T15_50_41.710615", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-30T15:50:41.710615.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-30T15:50:41.710615.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_08_28T14_24_48.520314", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-28T14:24:48.520314.parquet"]}, {"split": "2023_08_30T15_50_41.710615", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-30T15:50:41.710615.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-30T15:50:41.710615.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_08_28T14_24_48.520314", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-28T14:24:48.520314.parquet"]}, {"split": "2023_08_30T15_50_41.710615", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-30T15:50:41.710615.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-30T15:50:41.710615.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_08_28T14_24_48.520314", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-28T14:24:48.520314.parquet"]}, {"split": "2023_08_30T15_50_41.710615", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-30T15:50:41.710615.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-30T15:50:41.710615.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_08_28T14_24_48.520314", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-28T14:24:48.520314.parquet"]}, {"split": "2023_08_30T15_50_41.710615", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-30T15:50:41.710615.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-30T15:50:41.710615.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_08_28T14_24_48.520314", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-28T14:24:48.520314.parquet"]}, {"split": "2023_08_30T15_50_41.710615", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-30T15:50:41.710615.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-30T15:50:41.710615.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_08_28T14_24_48.520314", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-28T14:24:48.520314.parquet"]}, {"split": "2023_08_30T15_50_41.710615", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-30T15:50:41.710615.parquet"]}, {"split": "latest", 
"path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-30T15:50:41.710615.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_08_28T14_24_48.520314", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-28T14:24:48.520314.parquet"]}, {"split": "2023_08_30T15_50_41.710615", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-30T15:50:41.710615.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-30T15:50:41.710615.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_08_28T14_24_48.520314", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-28T14:24:48.520314.parquet"]}, {"split": "2023_08_30T15_50_41.710615", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-30T15:50:41.710615.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-30T15:50:41.710615.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_08_28T14_24_48.520314", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-28T14:24:48.520314.parquet"]}, {"split": "2023_08_30T15_50_41.710615", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-30T15:50:41.710615.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-30T15:50:41.710615.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_08_28T14_24_48.520314", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-28T14:24:48.520314.parquet"]}, {"split": "2023_08_30T15_50_41.710615", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-30T15:50:41.710615.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-30T15:50:41.710615.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_08_28T14_24_48.520314", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-28T14:24:48.520314.parquet"]}, {"split": "2023_08_30T15_50_41.710615", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-30T15:50:41.710615.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-30T15:50:41.710615.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_08_28T14_24_48.520314", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-28T14:24:48.520314.parquet"]}, {"split": "2023_08_30T15_50_41.710615", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-30T15:50:41.710615.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-30T15:50:41.710615.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_09_30T13_26_26.501307", "path": ["**/details_harness|winogrande|5_2023-09-30T13-26-26.501307.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-09-30T13-26-26.501307.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_08_28T14_24_48.520314", "path": ["results_2023-08-28T14:24:48.520314.parquet"]}, {"split": "2023_08_30T15_50_41.710615", "path": ["results_2023-08-30T15:50:41.710615.parquet"]}, {"split": "2023_09_30T13_26_26.501307", "path": ["results_2023-09-30T13-26-26.501307.parquet"]}, {"split": "latest", "path": ["results_2023-09-30T13-26-26.501307.parquet"]}]}]}
2023-09-30T12:26:38+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of WizardLM/WizardCoder-Python-34B-V1.0 ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model WizardLM/WizardCoder-Python-34B-V1.0 on the Open LLM Leaderboard. The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 3 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-09-30T13:26:26.501307 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
[ "# Dataset Card for Evaluation run of WizardLM/WizardCoder-Python-34B-V1.0", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model WizardLM/WizardCoder-Python-34B-V1.0 on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 3 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-09-30T13:26:26.501307(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of WizardLM/WizardCoder-Python-34B-V1.0", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model WizardLM/WizardCoder-Python-34B-V1.0 on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 3 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-09-30T13:26:26.501307(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 26, 31, 174, 67, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of WizardLM/WizardCoder-Python-34B-V1.0## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model WizardLM/WizardCoder-Python-34B-V1.0 on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 3 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-09-30T13:26:26.501307(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
ef807c55501456ed885ca65cb070e817b6b9d43d
OpenOrca but just the GPT4 bits.
rwkv-x-dev/openorca-gpt4
[ "region:us" ]
2023-08-28T13:38:13+00:00
{"pretty_name": "OpenOrca", "configs": [{"config_name": "default", "default": true, "data_files": [{"split": "train", "path": ["*.parquet"]}]}]}
2023-08-28T13:44:06+00:00
[]
[]
TAGS #region-us
OpenOrca but just the GPT4 bits.
[]
[ "TAGS\n#region-us \n" ]
[ 6 ]
[ "passage: TAGS\n#region-us \n" ]
84e5cf3bcd6acb3dfc70b6760451645872218a3e
# Evaluation dataset for the paper "A Holistic Approach to Undesired Content Detection" The evaluation dataset `data/samples-1680.jsonl.gz` is the test set used in [this paper](https://arxiv.org/abs/2208.03274). Each line contains information about one sample in a JSON object and each sample is labeled according to our taxonomy. The category label is a binary flag, but if it does not include in the JSON, it means we do not know the label. | Category | Label | Definition | | -------- | ----- | ---------- | | sexual | `S` | Content meant to arouse sexual excitement, such as the description of sexual activity, or that promotes sexual services (excluding sex education and wellness). | | hate | `H` | Content that expresses, incites, or promotes hate based on race, gender, ethnicity, religion, nationality, sexual orientation, disability status, or caste. | | violence | `V` | Content that promotes or glorifies violence or celebrates the suffering or humiliation of others. | | harassment | `HR` | Content that may be used to torment or annoy individuals in real life, or make harassment more likely to occur. | | self-harm | `SH` | Content that promotes, encourages, or depicts acts of self-harm, such as suicide, cutting, and eating disorders. | | sexual/minors | `S3` | Sexual content that includes an individual who is under 18 years old. | | hate/threatening | `H2` | Hateful content that also includes violence or serious harm towards the targeted group. | | violence/graphic | `V2` | Violent content that depicts death, violence, or serious physical injury in extreme graphic detail. | Parsed from the GitHub repo: https://github.com/openai/moderation-api-release
mmathys/openai-moderation-api-evaluation
[ "task_categories:text-classification", "language:en", "license:mit", "arxiv:2208.03274", "region:us" ]
2023-08-28T13:50:57+00:00
{"language": "en", "license": "mit", "task_categories": ["text-classification"], "pretty_name": "OpenAI Moderation API Validation Dataset"}
2023-08-28T13:56:39+00:00
[ "2208.03274" ]
[ "en" ]
TAGS #task_categories-text-classification #language-English #license-mit #arxiv-2208.03274 #region-us
Evaluation dataset for the paper "A Holistic Approach to Undesired Content Detection" ===================================================================================== The evaluation dataset 'data/URL' is the test set used in this paper. Each line contains information about one sample in a JSON object and each sample is labeled according to our taxonomy. The category label is a binary flag, but if it does not include in the JSON, it means we do not know the label. Category: sexual, Label: 'S', Definition: Content meant to arouse sexual excitement, such as the description of sexual activity, or that promotes sexual services (excluding sex education and wellness). Category: hate, Label: 'H', Definition: Content that expresses, incites, or promotes hate based on race, gender, ethnicity, religion, nationality, sexual orientation, disability status, or caste. Category: violence, Label: 'V', Definition: Content that promotes or glorifies violence or celebrates the suffering or humiliation of others. Category: harassment, Label: 'HR', Definition: Content that may be used to torment or annoy individuals in real life, or make harassment more likely to occur. Category: self-harm, Label: 'SH', Definition: Content that promotes, encourages, or depicts acts of self-harm, such as suicide, cutting, and eating disorders. Category: sexual/minors, Label: 'S3', Definition: Sexual content that includes an individual who is under 18 years old. Category: hate/threatening, Label: 'H2', Definition: Hateful content that also includes violence or serious harm towards the targeted group. Category: violence/graphic, Label: 'V2', Definition: Violent content that depicts death, violence, or serious physical injury in extreme graphic detail. Parsed from the GitHub repo: URL
[]
[ "TAGS\n#task_categories-text-classification #language-English #license-mit #arxiv-2208.03274 #region-us \n" ]
[ 35 ]
[ "passage: TAGS\n#task_categories-text-classification #language-English #license-mit #arxiv-2208.03274 #region-us \n" ]
071f742c67b62b7061c2abdee66cbcf4dac5b29f
# Dataset Card for "embedding_20k_test" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
Nazzaroth2/embedding_20k_test
[ "region:us" ]
2023-08-28T14:03:24+00:00
{"configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}], "dataset_info": {"features": [{"name": "lang", "dtype": "string"}, {"name": "text", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 2446153, "num_examples": 40000}], "download_size": 1319617, "dataset_size": 2446153}}
2023-08-28T14:03:29+00:00
[]
[]
TAGS #region-us
# Dataset Card for "embedding_20k_test" More Information needed
[ "# Dataset Card for \"embedding_20k_test\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"embedding_20k_test\"\n\nMore Information needed" ]
[ 6, 17 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"embedding_20k_test\"\n\nMore Information needed" ]
abfca7d6c30be3e1aad7a4f25e1d37540d9328ad
# Dataset of Rhantolk Ytri Historia This is the dataset of Rhantolk Ytri Historia, containing 33 images and their tags. Images are crawled from many sites (e.g. danbooru, pixiv, zerochan ...), the auto-crawling system is powered by [DeepGHS Team](https://github.com/deepghs)([huggingface organization](https://huggingface.co/deepghs)). | Name | Images | Download | Description | |:------------|---------:|:------------------------------------|:-------------------------------------------------------------------------| | raw | 33 | [Download](dataset-raw.zip) | Raw data with meta information. | | raw-stage3 | 66 | [Download](dataset-raw-stage3.zip) | 3-stage cropped raw data with meta information. | | 384x512 | 33 | [Download](dataset-384x512.zip) | 384x512 aligned dataset. | | 512x512 | 33 | [Download](dataset-512x512.zip) | 512x512 aligned dataset. | | 512x704 | 33 | [Download](dataset-512x704.zip) | 512x704 aligned dataset. | | 640x640 | 33 | [Download](dataset-640x640.zip) | 640x640 aligned dataset. | | 640x880 | 33 | [Download](dataset-640x880.zip) | 640x880 aligned dataset. | | stage3-640 | 66 | [Download](dataset-stage3-640.zip) | 3-stage cropped dataset with the shorter side not exceeding 640 pixels. | | stage3-800 | 66 | [Download](dataset-stage3-800.zip) | 3-stage cropped dataset with the shorter side not exceeding 800 pixels. | | stage3-1200 | 66 | [Download](dataset-stage3-1200.zip) | 3-stage cropped dataset with the shorter side not exceeding 1200 pixels. |
CyberHarem/rhantolk_sukasuka
[ "task_categories:text-to-image", "size_categories:n<1K", "license:mit", "art", "not-for-all-audiences", "region:us" ]
2023-08-28T14:18:26+00:00
{"license": "mit", "size_categories": ["n<1K"], "task_categories": ["text-to-image"], "tags": ["art", "not-for-all-audiences"]}
2023-09-17T16:25:46+00:00
[]
[]
TAGS #task_categories-text-to-image #size_categories-n<1K #license-mit #art #not-for-all-audiences #region-us
Dataset of Rhantolk Ytri Historia ================================= This is the dataset of Rhantolk Ytri Historia, containing 33 images and their tags. Images are crawled from many sites (e.g. danbooru, pixiv, zerochan ...), the auto-crawling system is powered by DeepGHS Team(huggingface organization).
[]
[ "TAGS\n#task_categories-text-to-image #size_categories-n<1K #license-mit #art #not-for-all-audiences #region-us \n" ]
[ 44 ]
[ "passage: TAGS\n#task_categories-text-to-image #size_categories-n<1K #license-mit #art #not-for-all-audiences #region-us \n" ]
e416880e526ae7ab60d470f7ea45223939ea4e33
# Dataset Card for "named-necklace-dataset" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
cto-algo-huggingface/named-necklace-dataset-with-original-canny-sobel-caption
[ "region:us" ]
2023-08-28T14:23:03+00:00
{"dataset_info": {"features": [{"name": "original", "dtype": "image"}, {"name": "canny", "dtype": "image"}, {"name": "sobel", "dtype": "image"}, {"name": "caption", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 24199890.0, "num_examples": 97}], "download_size": 21932929, "dataset_size": 24199890.0}}
2023-08-28T20:52:54+00:00
[]
[]
TAGS #region-us
# Dataset Card for "named-necklace-dataset" More Information needed
[ "# Dataset Card for \"named-necklace-dataset\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"named-necklace-dataset\"\n\nMore Information needed" ]
[ 6, 19 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"named-necklace-dataset\"\n\nMore Information needed" ]
46672af6cb882e6b55802a61ae3817adf5328bf7
# Dataset of Nopht Caro Oracion This is the dataset of Nopht Caro Oracion, containing 34 images and their tags. Images are crawled from many sites (e.g. danbooru, pixiv, zerochan ...), the auto-crawling system is powered by [DeepGHS Team](https://github.com/deepghs)([huggingface organization](https://huggingface.co/deepghs)). | Name | Images | Download | Description | |:------------|---------:|:------------------------------------|:-------------------------------------------------------------------------| | raw | 34 | [Download](dataset-raw.zip) | Raw data with meta information. | | raw-stage3 | 75 | [Download](dataset-raw-stage3.zip) | 3-stage cropped raw data with meta information. | | 384x512 | 34 | [Download](dataset-384x512.zip) | 384x512 aligned dataset. | | 512x512 | 34 | [Download](dataset-512x512.zip) | 512x512 aligned dataset. | | 512x704 | 34 | [Download](dataset-512x704.zip) | 512x704 aligned dataset. | | 640x640 | 34 | [Download](dataset-640x640.zip) | 640x640 aligned dataset. | | 640x880 | 34 | [Download](dataset-640x880.zip) | 640x880 aligned dataset. | | stage3-640 | 75 | [Download](dataset-stage3-640.zip) | 3-stage cropped dataset with the shorter side not exceeding 640 pixels. | | stage3-800 | 75 | [Download](dataset-stage3-800.zip) | 3-stage cropped dataset with the shorter side not exceeding 800 pixels. | | stage3-1200 | 75 | [Download](dataset-stage3-1200.zip) | 3-stage cropped dataset with the shorter side not exceeding 1200 pixels. |
CyberHarem/nopht_sukasuka
[ "task_categories:text-to-image", "size_categories:n<1K", "license:mit", "art", "not-for-all-audiences", "region:us" ]
2023-08-28T14:25:42+00:00
{"license": "mit", "size_categories": ["n<1K"], "task_categories": ["text-to-image"], "tags": ["art", "not-for-all-audiences"]}
2023-09-17T16:25:48+00:00
[]
[]
TAGS #task_categories-text-to-image #size_categories-n<1K #license-mit #art #not-for-all-audiences #region-us
Dataset of Nopht Caro Oracion ============================= This is the dataset of Nopht Caro Oracion, containing 34 images and their tags. Images are crawled from many sites (e.g. danbooru, pixiv, zerochan ...), the auto-crawling system is powered by DeepGHS Team(huggingface organization).
[]
[ "TAGS\n#task_categories-text-to-image #size_categories-n<1K #license-mit #art #not-for-all-audiences #region-us \n" ]
[ 44 ]
[ "passage: TAGS\n#task_categories-text-to-image #size_categories-n<1K #license-mit #art #not-for-all-audiences #region-us \n" ]
1bc63da5938ed2e07d266aca48c65c7a84266b74
# Dataset Card for "church_classification" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
daveK91/church_classification
[ "region:us" ]
2023-08-28T14:26:24+00:00
{"configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "test", "path": "data/test-*"}, {"split": "validation", "path": "data/validation-*"}]}], "dataset_info": {"features": [{"name": "pixel_values", "sequence": {"sequence": {"sequence": {"sequence": "float32"}}}}, {"name": "label", "dtype": "int64"}], "splits": [{"name": "train", "num_bytes": 55644176.0, "num_examples": 92}, {"name": "test", "num_bytes": 12096560.0, "num_examples": 20}, {"name": "validation", "num_bytes": 10282076.0, "num_examples": 17}], "download_size": 18318297, "dataset_size": 78022812.0}}
2023-08-28T14:55:52+00:00
[]
[]
TAGS #region-us
# Dataset Card for "church_classification" More Information needed
[ "# Dataset Card for \"church_classification\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"church_classification\"\n\nMore Information needed" ]
[ 6, 16 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"church_classification\"\n\nMore Information needed" ]
c0966dc4e308ad854845658bf5fd60bd153905bd
# Dataset Card for "en_ar_transliterated" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
YassineBenlaria/en_ar_transliterated
[ "region:us" ]
2023-08-28T14:27:41+00:00
{"dataset_info": {"features": [{"name": "path", "dtype": "string"}, {"name": "audio", "dtype": {"audio": {"sampling_rate": 48000}}}, {"name": "sentence_lat", "dtype": "string"}, {"name": "sentence", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 299803083.0, "num_examples": 500}], "download_size": 262146455, "dataset_size": 299803083.0}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]}
2023-08-28T14:27:53+00:00
[]
[]
TAGS #region-us
# Dataset Card for "en_ar_transliterated" More Information needed
[ "# Dataset Card for \"en_ar_transliterated\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"en_ar_transliterated\"\n\nMore Information needed" ]
[ 6, 17 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"en_ar_transliterated\"\n\nMore Information needed" ]
bf534eb6f9e8c1017dda2e4bdfb15e6b689e5400
# Dataset of Nephren Ruq Insania This is the dataset of Nephren Ruq Insania, containing 48 images and their tags. Images are crawled from many sites (e.g. danbooru, pixiv, zerochan ...), the auto-crawling system is powered by [DeepGHS Team](https://github.com/deepghs)([huggingface organization](https://huggingface.co/deepghs)). | Name | Images | Download | Description | |:------------|---------:|:------------------------------------|:-------------------------------------------------------------------------| | raw | 48 | [Download](dataset-raw.zip) | Raw data with meta information. | | raw-stage3 | 101 | [Download](dataset-raw-stage3.zip) | 3-stage cropped raw data with meta information. | | 384x512 | 48 | [Download](dataset-384x512.zip) | 384x512 aligned dataset. | | 512x512 | 48 | [Download](dataset-512x512.zip) | 512x512 aligned dataset. | | 512x704 | 48 | [Download](dataset-512x704.zip) | 512x704 aligned dataset. | | 640x640 | 48 | [Download](dataset-640x640.zip) | 640x640 aligned dataset. | | 640x880 | 48 | [Download](dataset-640x880.zip) | 640x880 aligned dataset. | | stage3-640 | 101 | [Download](dataset-stage3-640.zip) | 3-stage cropped dataset with the shorter side not exceeding 640 pixels. | | stage3-800 | 101 | [Download](dataset-stage3-800.zip) | 3-stage cropped dataset with the shorter side not exceeding 800 pixels. | | stage3-1200 | 101 | [Download](dataset-stage3-1200.zip) | 3-stage cropped dataset with the shorter side not exceeding 1200 pixels. |
CyberHarem/nephren_sukasuka
[ "task_categories:text-to-image", "size_categories:n<1K", "license:mit", "art", "not-for-all-audiences", "region:us" ]
2023-08-28T14:34:52+00:00
{"license": "mit", "size_categories": ["n<1K"], "task_categories": ["text-to-image"], "tags": ["art", "not-for-all-audiences"]}
2023-09-17T16:25:50+00:00
[]
[]
TAGS #task_categories-text-to-image #size_categories-n<1K #license-mit #art #not-for-all-audiences #region-us
Dataset of Nephren Ruq Insania ============================== This is the dataset of Nephren Ruq Insania, containing 48 images and their tags. Images are crawled from many sites (e.g. danbooru, pixiv, zerochan ...), the auto-crawling system is powered by DeepGHS Team(huggingface organization).
[]
[ "TAGS\n#task_categories-text-to-image #size_categories-n<1K #license-mit #art #not-for-all-audiences #region-us \n" ]
[ 44 ]
[ "passage: TAGS\n#task_categories-text-to-image #size_categories-n<1K #license-mit #art #not-for-all-audiences #region-us \n" ]
70f91f6cc678c6c64516b37a48ec1374e2d52a5b
# Dataset Card for "LEVIRCD_Cropped_256" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
ericyu/LEVIRCD_Cropped_256
[ "region:us" ]
2023-08-28T14:35:08+00:00
{"dataset_info": {"features": [{"name": "imageA", "dtype": "image"}, {"name": "imageB", "dtype": "image"}, {"name": "label", "dtype": "image"}], "splits": [{"name": "train", "num_bytes": 287118335.44, "num_examples": 7120}, {"name": "test", "num_bytes": 73188109.824, "num_examples": 2048}, {"name": "val", "num_bytes": 34384403.584, "num_examples": 1024}], "download_size": 345121409, "dataset_size": 394690848.848}}
2024-02-07T16:48:34+00:00
[]
[]
TAGS #region-us
# Dataset Card for "LEVIRCD_Cropped_256" More Information needed
[ "# Dataset Card for \"LEVIRCD_Cropped_256\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"LEVIRCD_Cropped_256\"\n\nMore Information needed" ]
[ 6, 19 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"LEVIRCD_Cropped_256\"\n\nMore Information needed" ]
37cf776bec8df6248f21926d60c630e64eefd2ef
### Dataset Summary The Quora dataset is composed of question pairs, and the task is to determine if the questions are paraphrases of each other (have the same meaning). The dataset was translated to Portuguese using the model [seamless-m4t-medium](https://huggingface.co/facebook/seamless-m4t-medium). ### Languages Portuguese
luiseduardobrito/ptbr-quora-translated
[ "task_categories:text-classification", "language:pt", "quora", "seamless-m4t", "region:us" ]
2023-08-28T14:48:14+00:00
{"language": ["pt"], "task_categories": ["text-classification"], "tags": ["quora", "seamless-m4t"]}
2023-08-28T14:56:20+00:00
[]
[ "pt" ]
TAGS #task_categories-text-classification #language-Portuguese #quora #seamless-m4t #region-us
### Dataset Summary The Quora dataset is composed of question pairs, and the task is to determine if the questions are paraphrases of each other (have the same meaning). The dataset was translated to Portuguese using the model seamless-m4t-medium. ### Languages Portuguese
[ "### Dataset Summary\n\nThe Quora dataset is composed of question pairs, and the task is to determine if the questions are paraphrases of each \nother (have the same meaning). The dataset was translated to Portuguese using the model seamless-m4t-medium.", "### Languages\n\nPortuguese" ]
[ "TAGS\n#task_categories-text-classification #language-Portuguese #quora #seamless-m4t #region-us \n", "### Dataset Summary\n\nThe Quora dataset is composed of question pairs, and the task is to determine if the questions are paraphrases of each \nother (have the same meaning). The dataset was translated to Portuguese using the model seamless-m4t-medium.", "### Languages\n\nPortuguese" ]
[ 34, 66, 7 ]
[ "passage: TAGS\n#task_categories-text-classification #language-Portuguese #quora #seamless-m4t #region-us \n### Dataset Summary\n\nThe Quora dataset is composed of question pairs, and the task is to determine if the questions are paraphrases of each \nother (have the same meaning). The dataset was translated to Portuguese using the model seamless-m4t-medium.### Languages\n\nPortuguese" ]
9f25ad4f61fa645a0f7c2b40597646c71b3fa4cd
# Dataset Card for "wikipedia-fr" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
KasparZ/wikipedia-fr
[ "region:us" ]
2023-08-28T14:50:56+00:00
{"configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}], "dataset_info": {"features": [{"name": "id", "dtype": "string"}, {"name": "url", "dtype": "string"}, {"name": "title", "dtype": "string"}, {"name": "text", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 7375791329, "num_examples": 2402095}], "download_size": 1422185286, "dataset_size": 7375791329}}
2023-08-28T15:00:59+00:00
[]
[]
TAGS #region-us
# Dataset Card for "wikipedia-fr" More Information needed
[ "# Dataset Card for \"wikipedia-fr\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"wikipedia-fr\"\n\nMore Information needed" ]
[ 6, 13 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"wikipedia-fr\"\n\nMore Information needed" ]
cff7ba71cbb5b3d47565e257744d7290e263393f
# Dataset of Tiat Siba Ignareo This is the dataset of Tiat Siba Ignareo, containing 32 images and their tags. Images are crawled from many sites (e.g. danbooru, pixiv, zerochan ...), the auto-crawling system is powered by [DeepGHS Team](https://github.com/deepghs)([huggingface organization](https://huggingface.co/deepghs)). | Name | Images | Download | Description | |:------------|---------:|:------------------------------------|:-------------------------------------------------------------------------| | raw | 32 | [Download](dataset-raw.zip) | Raw data with meta information. | | raw-stage3 | 61 | [Download](dataset-raw-stage3.zip) | 3-stage cropped raw data with meta information. | | 384x512 | 32 | [Download](dataset-384x512.zip) | 384x512 aligned dataset. | | 512x512 | 32 | [Download](dataset-512x512.zip) | 512x512 aligned dataset. | | 512x704 | 32 | [Download](dataset-512x704.zip) | 512x704 aligned dataset. | | 640x640 | 32 | [Download](dataset-640x640.zip) | 640x640 aligned dataset. | | 640x880 | 32 | [Download](dataset-640x880.zip) | 640x880 aligned dataset. | | stage3-640 | 61 | [Download](dataset-stage3-640.zip) | 3-stage cropped dataset with the shorter side not exceeding 640 pixels. | | stage3-800 | 61 | [Download](dataset-stage3-800.zip) | 3-stage cropped dataset with the shorter side not exceeding 800 pixels. | | stage3-1200 | 61 | [Download](dataset-stage3-1200.zip) | 3-stage cropped dataset with the shorter side not exceeding 1200 pixels. |
CyberHarem/tiat_sukasuka
[ "task_categories:text-to-image", "size_categories:n<1K", "license:mit", "art", "not-for-all-audiences", "region:us" ]
2023-08-28T14:52:07+00:00
{"license": "mit", "size_categories": ["n<1K"], "task_categories": ["text-to-image"], "tags": ["art", "not-for-all-audiences"]}
2023-09-17T16:25:52+00:00
[]
[]
TAGS #task_categories-text-to-image #size_categories-n<1K #license-mit #art #not-for-all-audiences #region-us
Dataset of Tiat Siba Ignareo ============================ This is the dataset of Tiat Siba Ignareo, containing 32 images and their tags. Images are crawled from many sites (e.g. danbooru, pixiv, zerochan ...), the auto-crawling system is powered by DeepGHS Team(huggingface organization).
[]
[ "TAGS\n#task_categories-text-to-image #size_categories-n<1K #license-mit #art #not-for-all-audiences #region-us \n" ]
[ 44 ]
[ "passage: TAGS\n#task_categories-text-to-image #size_categories-n<1K #license-mit #art #not-for-all-audiences #region-us \n" ]
636eae4084a3af656cf50b4d31b71763f5aec773
# Dataset Card for "dom_classification" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
daveK91/dom_classification
[ "region:us" ]
2023-08-28T14:57:42+00:00
{"configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "test", "path": "data/test-*"}, {"split": "validation", "path": "data/validation-*"}]}], "dataset_info": {"features": [{"name": "pixel_values", "sequence": {"sequence": {"sequence": {"sequence": "float32"}}}}, {"name": "label", "dtype": {"class_label": {"names": {"0": "Anderes", "1": "Dreikoenigenschrein", "2": "Gerokreuz"}}}}], "splits": [{"name": "train", "num_bytes": 90724200.0, "num_examples": 150}, {"name": "test", "num_bytes": 19354496.0, "num_examples": 32}, {"name": "validation", "num_bytes": 16330356.0, "num_examples": 27}], "download_size": 28687705, "dataset_size": 126409052.0}}
2023-08-28T19:18:34+00:00
[]
[]
TAGS #region-us
# Dataset Card for "dom_classification" More Information needed
[ "# Dataset Card for \"dom_classification\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"dom_classification\"\n\nMore Information needed" ]
[ 6, 14 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"dom_classification\"\n\nMore Information needed" ]
86640fe7c0dc5995d1e615154f2dcda1ac89fc35
# Dataset Card for Dataset Name ## Dataset Description - **Homepage:** - **Repository:** - **Paper:** - **Leaderboard:** - **Point of Contact:** ### Dataset Summary This dataset card aims to be a base template for new datasets. It has been generated using [this raw template](https://github.com/huggingface/huggingface_hub/blob/main/src/huggingface_hub/templates/datasetcard_template.md?plain=1). ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
k19862217/sql-optimizer
[ "region:us" ]
2023-08-28T14:59:53+00:00
{}
2023-08-28T15:16:56+00:00
[]
[]
TAGS #region-us
# Dataset Card for Dataset Name ## Dataset Description - Homepage: - Repository: - Paper: - Leaderboard: - Point of Contact: ### Dataset Summary This dataset card aims to be a base template for new datasets. It has been generated using this raw template. ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
[ "# Dataset Card for Dataset Name", "## Dataset Description\n\n- Homepage: \n- Repository: \n- Paper: \n- Leaderboard: \n- Point of Contact:", "### Dataset Summary\n\nThis dataset card aims to be a base template for new datasets. It has been generated using this raw template.", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Dataset Name", "## Dataset Description\n\n- Homepage: \n- Repository: \n- Paper: \n- Leaderboard: \n- Point of Contact:", "### Dataset Summary\n\nThis dataset card aims to be a base template for new datasets. It has been generated using this raw template.", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 8, 24, 32, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Dataset Name## Dataset Description\n\n- Homepage: \n- Repository: \n- Paper: \n- Leaderboard: \n- Point of Contact:### Dataset Summary\n\nThis dataset card aims to be a base template for new datasets. It has been generated using this raw template.### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
ba531d378c2540161c61f496b55a1d858b23e71c
# Dataset of Nygglatho Astartus This is the dataset of Nygglatho Astartus, containing 40 images and their tags. Images are crawled from many sites (e.g. danbooru, pixiv, zerochan ...), the auto-crawling system is powered by [DeepGHS Team](https://github.com/deepghs)([huggingface organization](https://huggingface.co/deepghs)). | Name | Images | Download | Description | |:------------|---------:|:------------------------------------|:-------------------------------------------------------------------------| | raw | 40 | [Download](dataset-raw.zip) | Raw data with meta information. | | raw-stage3 | 105 | [Download](dataset-raw-stage3.zip) | 3-stage cropped raw data with meta information. | | 384x512 | 40 | [Download](dataset-384x512.zip) | 384x512 aligned dataset. | | 512x512 | 40 | [Download](dataset-512x512.zip) | 512x512 aligned dataset. | | 512x704 | 40 | [Download](dataset-512x704.zip) | 512x704 aligned dataset. | | 640x640 | 40 | [Download](dataset-640x640.zip) | 640x640 aligned dataset. | | 640x880 | 40 | [Download](dataset-640x880.zip) | 640x880 aligned dataset. | | stage3-640 | 105 | [Download](dataset-stage3-640.zip) | 3-stage cropped dataset with the shorter side not exceeding 640 pixels. | | stage3-800 | 105 | [Download](dataset-stage3-800.zip) | 3-stage cropped dataset with the shorter side not exceeding 800 pixels. | | stage3-1200 | 105 | [Download](dataset-stage3-1200.zip) | 3-stage cropped dataset with the shorter side not exceeding 1200 pixels. |
CyberHarem/nygglatho_sukasuka
[ "task_categories:text-to-image", "size_categories:n<1K", "license:mit", "art", "not-for-all-audiences", "region:us" ]
2023-08-28T15:01:30+00:00
{"license": "mit", "size_categories": ["n<1K"], "task_categories": ["text-to-image"], "tags": ["art", "not-for-all-audiences"]}
2023-09-17T16:25:54+00:00
[]
[]
TAGS #task_categories-text-to-image #size_categories-n<1K #license-mit #art #not-for-all-audiences #region-us
Dataset of Nygglatho Astartus ============================= This is the dataset of Nygglatho Astartus, containing 40 images and their tags. Images are crawled from many sites (e.g. danbooru, pixiv, zerochan ...), the auto-crawling system is powered by DeepGHS Team(huggingface organization).
[]
[ "TAGS\n#task_categories-text-to-image #size_categories-n<1K #license-mit #art #not-for-all-audiences #region-us \n" ]
[ 44 ]
[ "passage: TAGS\n#task_categories-text-to-image #size_categories-n<1K #license-mit #art #not-for-all-audiences #region-us \n" ]
3940b16d02fd96ee93eb4bd19bb5f573811351d3
Dump of 2023-08-20 of all french article in wikinews https://dumps.wikimedia.org/frwikinews/20230820/frwikinews-20230820-pages-articles.xml.bz2
Kant1/French_Wikinews_articles
[ "task_categories:text-generation", "language:fr", "region:us" ]
2023-08-28T15:12:48+00:00
{"language": ["fr"], "task_categories": ["text-generation"]}
2023-08-28T15:21:44+00:00
[]
[ "fr" ]
TAGS #task_categories-text-generation #language-French #region-us
Dump of 2023-08-20 of all french article in wikinews URL
[]
[ "TAGS\n#task_categories-text-generation #language-French #region-us \n" ]
[ 23 ]
[ "passage: TAGS\n#task_categories-text-generation #language-French #region-us \n" ]
cafef49cb420d8b06e9e984f8e0508ad9bdcbb93
Dump of 2023-08-20 of all french article in wikiversity https://dumps.wikimedia.org/frwikiversity/20230820/frwikiversity-20230820-pages-articles.xml.bz2
Kant1/French_Wikiversity_articles
[ "task_categories:text-generation", "language:fr", "region:us" ]
2023-08-28T15:20:24+00:00
{"language": ["fr"], "task_categories": ["text-generation"]}
2023-08-28T18:03:59+00:00
[]
[ "fr" ]
TAGS #task_categories-text-generation #language-French #region-us
Dump of 2023-08-20 of all french article in wikiversity URL
[]
[ "TAGS\n#task_categories-text-generation #language-French #region-us \n" ]
[ 23 ]
[ "passage: TAGS\n#task_categories-text-generation #language-French #region-us \n" ]
6c4e015485fb802ec5dc4fd8dd0bbf7ee17db00a
Dump of 2023-08-20 of all french article in wikivoyage https://dumps.wikimedia.org/frwikivoyage/20230820/frwikivoyage-20230820-pages-articles.xml.bz2
Kant1/French_Wikivoyage_articles
[ "task_categories:text-generation", "language:fr", "region:us" ]
2023-08-28T15:26:19+00:00
{"language": ["fr"], "task_categories": ["text-generation"]}
2023-08-28T15:27:59+00:00
[]
[ "fr" ]
TAGS #task_categories-text-generation #language-French #region-us
Dump of 2023-08-20 of all french article in wikivoyage URL
[]
[ "TAGS\n#task_categories-text-generation #language-French #region-us \n" ]
[ 23 ]
[ "passage: TAGS\n#task_categories-text-generation #language-French #region-us \n" ]
39d0be8c39e57fbfb823fcae58aa419dcd0e239a
Dump of 2023-08-20 of all french article in wikiquote https://dumps.wikimedia.org/frwikiquote/20230820/frwikiquote-20230820-pages-articles.xml.bz2
Kant1/French_Wikiquote_articles
[ "task_categories:text-generation", "language:fr", "region:us" ]
2023-08-28T15:32:24+00:00
{"language": ["fr"], "task_categories": ["text-generation"]}
2023-08-28T15:33:26+00:00
[]
[ "fr" ]
TAGS #task_categories-text-generation #language-French #region-us
Dump of 2023-08-20 of all french article in wikiquote URL
[]
[ "TAGS\n#task_categories-text-generation #language-French #region-us \n" ]
[ 23 ]
[ "passage: TAGS\n#task_categories-text-generation #language-French #region-us \n" ]
2e47d7fd8eac115e104bff0e8c49cef5269aba09
# Dataset Card for "eu_test2" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
KatMarie/eu_test2
[ "region:us" ]
2023-08-28T15:55:37+00:00
{"dataset_info": {"features": [{"name": "text", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 606653.618232792, "num_examples": 10331}], "download_size": 416014, "dataset_size": 606653.618232792}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]}
2023-08-29T14:07:27+00:00
[]
[]
TAGS #region-us
# Dataset Card for "eu_test2" More Information needed
[ "# Dataset Card for \"eu_test2\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"eu_test2\"\n\nMore Information needed" ]
[ 6, 14 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"eu_test2\"\n\nMore Information needed" ]
016468d105210a3e2ce7a2c1ce2a1ae0b3da3f66
# Dataset Card for "mc4_meta" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
eduagarcia/mc4-pt
[ "region:us" ]
2023-08-28T15:55:46+00:00
{"dataset_info": {"features": [{"name": "text", "dtype": "string"}, {"name": "timestamp", "dtype": "string"}, {"name": "url", "dtype": "string"}, {"name": "meta", "struct": [{"name": "dedup", "struct": [{"name": "exact_norm", "struct": [{"name": "cluster_main_idx", "dtype": "int64"}, {"name": "cluster_size", "dtype": "int64"}, {"name": "exact_hash_idx", "dtype": "int64"}, {"name": "is_duplicate", "dtype": "bool"}]}, {"name": "minhash", "struct": [{"name": "cluster_main_idx", "dtype": "int64"}, {"name": "cluster_size", "dtype": "int64"}, {"name": "is_duplicate", "dtype": "bool"}, {"name": "minhash_idx", "dtype": "int64"}]}]}]}], "splits": [{"name": "train", "num_bytes": 520490704647, "num_examples": 169073811}], "download_size": 600645287, "dataset_size": 520490704647}}
2023-08-30T05:00:13+00:00
[]
[]
TAGS #region-us
# Dataset Card for "mc4_meta" More Information needed
[ "# Dataset Card for \"mc4_meta\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"mc4_meta\"\n\nMore Information needed" ]
[ 6, 15 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"mc4_meta\"\n\nMore Information needed" ]
f08672ab625766b8f8e691bdd5a04d34e07d0acd
# Dataset Card for "voxelgym_5c_critic_42x42_175000" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
Cubpaw/voxelgym_5c_critic_42x42_175000
[ "region:us" ]
2023-08-28T16:05:40+00:00
{"dataset_info": {"features": [{"name": "image", "dtype": "image"}, {"name": "astar_path", "dtype": "image"}, {"name": "pred_path", "sequence": {"sequence": "float32"}}], "splits": [{"name": "train", "num_bytes": 1058558980.0, "num_examples": 140000}, {"name": "validation", "num_bytes": 264668655.0, "num_examples": 35000}], "download_size": 299922529, "dataset_size": 1323227635.0}}
2023-08-28T16:09:44+00:00
[]
[]
TAGS #region-us
# Dataset Card for "voxelgym_5c_critic_42x42_175000" More Information needed
[ "# Dataset Card for \"voxelgym_5c_critic_42x42_175000\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"voxelgym_5c_critic_42x42_175000\"\n\nMore Information needed" ]
[ 6, 26 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"voxelgym_5c_critic_42x42_175000\"\n\nMore Information needed" ]
cafebe9a0f5d9678bcde9a78efe207b9efeb44a4
# Dataset Card for "KoAlpaca-v1.1a_ppl" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
nayohan/KoAlpaca-v1.1a_ppl
[ "region:us" ]
2023-08-28T16:26:04+00:00
{"dataset_info": {"features": [{"name": "instruction", "dtype": "string"}, {"name": "output", "dtype": "string"}, {"name": "url", "dtype": "string"}, {"name": "ppl", "dtype": "float64"}, {"name": "len", "dtype": "int64"}], "splits": [{"name": "train", "num_bytes": 23697723, "num_examples": 21151}], "download_size": 12981794, "dataset_size": 23697723}}
2023-08-28T16:29:13+00:00
[]
[]
TAGS #region-us
# Dataset Card for "KoAlpaca-v1.1a_ppl" More Information needed
[ "# Dataset Card for \"KoAlpaca-v1.1a_ppl\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"KoAlpaca-v1.1a_ppl\"\n\nMore Information needed" ]
[ 6, 21 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"KoAlpaca-v1.1a_ppl\"\n\nMore Information needed" ]
ae3159db818477706413300c5dcb497351de4591
# Dataset Card for "test_ocean_clients" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
cesarali/test_ocean_clients
[ "region:us" ]
2023-08-28T16:26:35+00:00
{"dataset_info": {"features": [{"name": "o", "dtype": "int64"}, {"name": "c", "dtype": "int64"}, {"name": "e", "dtype": "int64"}, {"name": "a", "dtype": "int64"}, {"name": "n", "dtype": "int64"}, {"name": "education", "dtype": "string"}, {"name": "current_work", "dtype": "string"}, {"name": "ethnicity", "dtype": "string"}, {"name": "income_bracket", "dtype": "string"}, {"name": "age", "dtype": "int64"}, {"name": "sexual_orientation", "dtype": "string"}, {"name": "favorite_movies", "sequence": "string"}, {"name": "favorite_tv_shows", "sequence": "string"}, {"name": "comment", "dtype": "string"}, {"name": "id", "dtype": "string"}, {"name": "location", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 6755, "num_examples": 20}], "download_size": 12166, "dataset_size": 6755}}
2023-08-28T16:26:38+00:00
[]
[]
TAGS #region-us
# Dataset Card for "test_ocean_clients" More Information needed
[ "# Dataset Card for \"test_ocean_clients\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"test_ocean_clients\"\n\nMore Information needed" ]
[ 6, 17 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"test_ocean_clients\"\n\nMore Information needed" ]
919bb034ba8fd17a53ef91ed25ca3456b434e688
# Dataset Card for "test_ipp50" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
cesarali/test_ipp50
[ "region:us" ]
2023-08-28T16:28:33+00:00
{"dataset_info": {"features": [{"name": "id", "dtype": "int64"}, {"name": "question", "dtype": "string"}, {"name": "choices", "sequence": "string"}, {"name": "value", "dtype": "float64"}], "splits": [{"name": "train", "num_bytes": 8439, "num_examples": 50}], "download_size": 4060, "dataset_size": 8439}}
2023-08-28T16:28:36+00:00
[]
[]
TAGS #region-us
# Dataset Card for "test_ipp50" More Information needed
[ "# Dataset Card for \"test_ipp50\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"test_ipp50\"\n\nMore Information needed" ]
[ 6, 15 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"test_ipp50\"\n\nMore Information needed" ]
920be7fd952a278137b8c453f04b67b18f5c9c33
# Dataset Card for "embedding_100_test" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
Nazzaroth2/embedding_100_test
[ "region:us" ]
2023-08-28T16:28:45+00:00
{"configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}], "dataset_info": {"features": [{"name": "lang", "dtype": "string"}, {"name": "text", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 8823, "num_examples": 200}], "download_size": 5829, "dataset_size": 8823}}
2023-08-28T16:28:48+00:00
[]
[]
TAGS #region-us
# Dataset Card for "embedding_100_test" More Information needed
[ "# Dataset Card for \"embedding_100_test\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"embedding_100_test\"\n\nMore Information needed" ]
[ 6, 16 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"embedding_100_test\"\n\nMore Information needed" ]
23379ea3a1886c036f2c75de62433f3d655608ec
# MoleculeNet Benchmark ([website](https://moleculenet.org/)) MoleculeNet is a benchmark specially designed for testing machine learning methods of molecular properties. As we aim to facilitate the development of molecular machine learning method, this work curates a number of dataset collections, creates a suite of software that implements many known featurizations and previously proposed algorithms. All methods and datasets are integrated as parts of the open source DeepChem package(MIT license). MoleculeNet is built upon multiple public databases. The full collection currently includes over 700,000 compounds tested on a range of different properties. We test the performances of various machine learning models with different featurizations on the datasets(detailed descriptions here), with all results reported in AUC-ROC, AUC-PRC, RMSE and MAE scores. For users, please cite: Zhenqin Wu, Bharath Ramsundar, Evan N. Feinberg, Joseph Gomes, Caleb Geniesse, Aneesh S. Pappu, Karl Leswing, Vijay Pande, MoleculeNet: A Benchmark for Molecular Machine Learning, arXiv preprint, arXiv: 1703.00564, 2017.
katielink/moleculenet-benchmark
[ "license:apache-2.0", "biology", "chemistry", "region:us" ]
2023-08-28T16:36:25+00:00
{"license": "apache-2.0", "tags": ["biology", "chemistry"], "configs": [{"config_name": "bace", "data_files": [{"split": "train", "path": "bace/train.csv"}, {"split": "test", "path": "bace/test.csv"}, {"split": "val", "path": "bace/valid.csv"}]}, {"config_name": "bbbp", "data_files": [{"split": "train", "path": "bbbp/train.csv"}, {"split": "test", "path": "bbbp/test.csv"}, {"split": "val", "path": "bbbp/valid.csv"}]}, {"config_name": "clintox", "data_files": [{"split": "train", "path": "clintox/train.csv"}, {"split": "test", "path": "clintox/test.csv"}, {"split": "val", "path": "clintox/valid.csv"}]}, {"config_name": "esol", "data_files": [{"split": "train", "path": "esol/train.csv"}, {"split": "test", "path": "esol/test.csv"}, {"split": "val", "path": "esol/valid.csv"}]}, {"config_name": "freesolv", "data_files": [{"split": "train", "path": "freesolv/train.csv"}, {"split": "test", "path": "freesolv/test.csv"}, {"split": "val", "path": "freesolv/valid.csv"}]}, {"config_name": "hiv", "data_files": [{"split": "train", "path": "hiv/train.csv"}, {"split": "test", "path": "hiv/test.csv"}, {"split": "val", "path": "hiv/valid.csv"}]}, {"config_name": "lipo", "data_files": [{"split": "train", "path": "lipo/train.csv"}, {"split": "test", "path": "lipo/test.csv"}, {"split": "val", "path": "lipo/valid.csv"}]}, {"config_name": "qm9", "data_files": [{"split": "train", "path": "qm9/train.csv"}, {"split": "test", "path": "qm9/test.csv"}, {"split": "val", "path": "qm9/valid.csv"}]}, {"config_name": "sider", "data_files": [{"split": "train", "path": "sider/train.csv"}, {"split": "test", "path": "sider/test.csv"}, {"split": "val", "path": "sider/valid.csv"}]}, {"config_name": "tox21", "data_files": [{"split": "train", "path": "tox21/train.csv"}, {"split": "test", "path": "tox21/test.csv"}, {"split": "val", "path": "tox21/valid.csv"}]}]}
2023-08-28T16:51:14+00:00
[]
[]
TAGS #license-apache-2.0 #biology #chemistry #region-us
# MoleculeNet Benchmark (website) MoleculeNet is a benchmark specially designed for testing machine learning methods of molecular properties. As we aim to facilitate the development of molecular machine learning method, this work curates a number of dataset collections, creates a suite of software that implements many known featurizations and previously proposed algorithms. All methods and datasets are integrated as parts of the open source DeepChem package(MIT license). MoleculeNet is built upon multiple public databases. The full collection currently includes over 700,000 compounds tested on a range of different properties. We test the performances of various machine learning models with different featurizations on the datasets(detailed descriptions here), with all results reported in AUC-ROC, AUC-PRC, RMSE and MAE scores. For users, please cite: Zhenqin Wu, Bharath Ramsundar, Evan N. Feinberg, Joseph Gomes, Caleb Geniesse, Aneesh S. Pappu, Karl Leswing, Vijay Pande, MoleculeNet: A Benchmark for Molecular Machine Learning, arXiv preprint, arXiv: 1703.00564, 2017.
[ "# MoleculeNet Benchmark (website)\n\nMoleculeNet is a benchmark specially designed for testing machine learning methods of molecular properties. As we aim to facilitate the development of molecular machine learning method, this work curates a number of dataset collections, creates a suite of software that implements many known featurizations and previously proposed algorithms. All methods and datasets are integrated as parts of the open source DeepChem package(MIT license).\n\nMoleculeNet is built upon multiple public databases. The full collection currently includes over 700,000 compounds tested on a range of different properties. We test the performances of various machine learning models with different featurizations on the datasets(detailed descriptions here), with all results reported in AUC-ROC, AUC-PRC, RMSE and MAE scores.\n\nFor users, please cite:\nZhenqin Wu, Bharath Ramsundar, Evan N. Feinberg, Joseph Gomes, Caleb Geniesse, Aneesh S. Pappu, Karl Leswing, Vijay Pande, MoleculeNet: A Benchmark for Molecular Machine Learning, arXiv preprint, arXiv: 1703.00564, 2017." ]
[ "TAGS\n#license-apache-2.0 #biology #chemistry #region-us \n", "# MoleculeNet Benchmark (website)\n\nMoleculeNet is a benchmark specially designed for testing machine learning methods of molecular properties. As we aim to facilitate the development of molecular machine learning method, this work curates a number of dataset collections, creates a suite of software that implements many known featurizations and previously proposed algorithms. All methods and datasets are integrated as parts of the open source DeepChem package(MIT license).\n\nMoleculeNet is built upon multiple public databases. The full collection currently includes over 700,000 compounds tested on a range of different properties. We test the performances of various machine learning models with different featurizations on the datasets(detailed descriptions here), with all results reported in AUC-ROC, AUC-PRC, RMSE and MAE scores.\n\nFor users, please cite:\nZhenqin Wu, Bharath Ramsundar, Evan N. Feinberg, Joseph Gomes, Caleb Geniesse, Aneesh S. Pappu, Karl Leswing, Vijay Pande, MoleculeNet: A Benchmark for Molecular Machine Learning, arXiv preprint, arXiv: 1703.00564, 2017." ]
[ 21, 260 ]
[ "passage: TAGS\n#license-apache-2.0 #biology #chemistry #region-us \n# MoleculeNet Benchmark (website)\n\nMoleculeNet is a benchmark specially designed for testing machine learning methods of molecular properties. As we aim to facilitate the development of molecular machine learning method, this work curates a number of dataset collections, creates a suite of software that implements many known featurizations and previously proposed algorithms. All methods and datasets are integrated as parts of the open source DeepChem package(MIT license).\n\nMoleculeNet is built upon multiple public databases. The full collection currently includes over 700,000 compounds tested on a range of different properties. We test the performances of various machine learning models with different featurizations on the datasets(detailed descriptions here), with all results reported in AUC-ROC, AUC-PRC, RMSE and MAE scores.\n\nFor users, please cite:\nZhenqin Wu, Bharath Ramsundar, Evan N. Feinberg, Joseph Gomes, Caleb Geniesse, Aneesh S. Pappu, Karl Leswing, Vijay Pande, MoleculeNet: A Benchmark for Molecular Machine Learning, arXiv preprint, arXiv: 1703.00564, 2017." ]
74e74f2e4507ef256fe536f78a776f4a1ff67955
# Do-Not-Answer: A Dataset for Evaluating Safeguards in LLMs ## Overview *Do not answer* is an open-source dataset to evaluate LLMs' safety mechanism at a low cost. The dataset is curated and filtered to consist only of prompts to which responsible language models do not answer. Besides human annotations, *Do not answer* also implements model-based evaluation, where a 600M fine-tuned BERT-like evaluator achieves comparable results with human and GPT-4. ## Instruction Taxonomy Instructions are organized by a three-level hierarchical taxonomy, covering 61 specific harms. ![taxonomy](assets/dna.png) *Do not answer* collects 939 instructions, the distribution across five risk areas and 12 harm types is shown below: ![risk_area](assets/instruction_dist.png) ## Response Assessment We assess six models' responses (GPT-4, ChatGPT, Claude, LLaMA-2, Vicuna, ChatGLM2) from two levels. * Harmfulness: whether a response is harmful (a binary label). * Action category: responses are categorized into six distinct groups, each representing a specific response type. ![action](assets/action.png) ### Human Evaluation High percentage of safe responses across five risk areas for six models demonstrates that they are basically safe. LLaMA-2 ranks the first, followed by ChatGPT, Claude, GPT-4, and Vicuna, with ChatGLM2 ranking the last by 85 harmful responses among 939. * Harmlessness Rank ![harmful_num](assets/harmful_num.png) * Action Category Distribution * ![action_distribution](assets/action_dist.png) ### Automatic Evaluation Longformer evaluators achieve comparable results with human and GPT-4. 
* GPT-4 evaluation * Fine-tuned Longformer classifier (600M) | Model | Human | GPT-4 | Longformer | |:---------------|--------:|--------:|-------------:| | LLaMA-2 | 99.7 | 99.4 | 99.3 | | ChatGPT | 98.5 | 97.7 | 97.9 | | Claude | 98.3 | 98.3 | 97.6 | | GPT-4 | 97.6 | 96.5 | 97.2 | | vicuna | 94.5 | 94.9 | 95.0 | | ChatGLM2 | 90.9 | 92.9 | 92.9 | ## Citation If our work is useful for your own, you can cite us with the following BibTex entry: ```bibtex @misc{wang2023donotanswer, author = {Wang, Yuxia, and Li, Haonan and Han, Xudong and Nakov, Preslav and Baldwin, Timothy}, title = {Do-Not-Answer: A Dataset for Evaluating Safeguards in LLMs}, year = {2023}, howpublished = {arXiv preprint arXiv:2308.13387}, } ``` ## License All datasets in this repository are released under the [Creative Commons Attribution-NonCommercial-ShareAlike 4.0 International License](http://creativecommons.org/licenses/by-nc-sa/4.0/). All source files in this repository are released under the Apache 2.0 license, the text of which can be found in the LICENSE file.
LibrAI/do-not-answer
[ "task_categories:text-generation", "size_categories:n<1K", "language:en", "license:apache-2.0", "safety", "large language model", "region:us" ]
2023-08-28T16:37:24+00:00
{"language": ["en"], "license": "apache-2.0", "size_categories": ["n<1K"], "task_categories": ["text-generation"], "pretty_name": "do-not-answer", "tags": ["safety", "large language model"], "dataset_info": {"features": [{"name": "id", "dtype": "int64"}, {"name": "risk_area", "dtype": "string"}, {"name": "types_of_harm", "dtype": "string"}, {"name": "specific_harms", "dtype": "string"}, {"name": "question", "dtype": "string"}, {"name": "GPT4_response", "dtype": "string"}, {"name": "GPT4_harmful", "dtype": "int64"}, {"name": "GPT4_action", "dtype": "int64"}, {"name": "ChatGPT_response", "dtype": "string"}, {"name": "ChatGPT_harmful", "dtype": "int64"}, {"name": "ChatGPT_action", "dtype": "int64"}, {"name": "Claude_response", "dtype": "string"}, {"name": "Claude_harmful", "dtype": "int64"}, {"name": "Claude_action", "dtype": "int64"}, {"name": "ChatGLM2_response", "dtype": "string"}, {"name": "ChatGLM2_harmful", "dtype": "int64"}, {"name": "ChatGLM2_action", "dtype": "int64"}, {"name": "llama2-7b-chat_response", "dtype": "string"}, {"name": "llama2-7b-chat_harmful", "dtype": "int64"}, {"name": "llama2-7b-chat_action", "dtype": "int64"}, {"name": "vicuna-7b_response", "dtype": "string"}, {"name": "vicuna-7b_harmful", "dtype": "int64"}, {"name": "vicuna-7b_action", "dtype": "int64"}], "splits": [{"name": "train", "num_bytes": 3997193, "num_examples": 939}], "download_size": 1709142, "dataset_size": 3997193}}
2023-08-28T17:22:52+00:00
[]
[ "en" ]
TAGS #task_categories-text-generation #size_categories-n<1K #language-English #license-apache-2.0 #safety #large language model #region-us
Do-Not-Answer: A Dataset for Evaluating Safeguards in LLMs ========================================================== Overview -------- *Do not answer* is an open-source dataset to evaluate LLMs' safety mechanism at a low cost. The dataset is curated and filtered to consist only of prompts to which responsible language models do not answer. Besides human annotations, *Do not answer* also implements model-based evaluation, where a 600M fine-tuned BERT-like evaluator achieves comparable results with human and GPT-4. Instruction Taxonomy -------------------- Instructions are organized by a three-level hierarchical taxonomy, covering 61 specific harms. !taxonomy *Do not answer* collects 939 instructions, the distribution across five risk areas and 12 harm types is shown below: !risk\_area Response Assessment ------------------- We assess six models' responses (GPT-4, ChatGPT, Claude, LLaMA-2, Vicuna, ChatGLM2) from two levels. * Harmfulness: whether a response is harmful (a binary label). * Action category: responses are categorized into six distinct groups, each representing a specific response type. !action ### Human Evaluation High percentage of safe responses across five risk areas for six models demonstrates that they are basically safe. LLaMA-2 ranks the first, followed by ChatGPT, Claude, GPT-4, and Vicuna, with ChatGLM2 ranking the last by 85 harmful responses among 939. * Harmlessness Rank !harmful\_num * Action Category Distribution * !action\_distribution ### Automatic Evaluation Longformer evaluators achieve comparable results with human and GPT-4. * GPT-4 evaluation * Fine-tuned Longformer classifier (600M) If our work is useful for your own, you can cite us with the following BibTex entry: License ------- All datasets in this repository are released under the Creative Commons Attribution-NonCommercial-ShareAlike 4.0 International License. 
All source files in this repository are released under the Apache 2.0 license, the text of which can be found in the LICENSE file.
[ "### Human Evaluation\n\n\nHigh percentage of safe responses across five risk areas for six models demonstrates that they are basically safe.\nLLaMA-2 ranks the first, followed by ChatGPT, Claude, GPT-4, and Vicuna, with ChatGLM2 ranking the last by 85 harmful responses among 939.\n\n\n* Harmlessness Rank\n\n\n!harmful\\_num\n\n\n* Action Category Distribution\n* \n\n\n!action\\_distribution", "### Automatic Evaluation\n\n\nLongformer evaluators achieve comparable results with human and GPT-4.\n\n\n* GPT-4 evaluation\n* Fine-tuned Longformer classifier (600M)\n\n\n\nIf our work is useful for your own, you can cite us with the following BibTex entry:\n\n\nLicense\n-------\n\n\nAll datasets in this repository are released under the\nCreative Commons Attribution-NonCommercial-ShareAlike 4.0 International License. All source files in this repository are released under the Apache 2.0 license, the text of which can be found in the LICENSE file." ]
[ "TAGS\n#task_categories-text-generation #size_categories-n<1K #language-English #license-apache-2.0 #safety #large language model #region-us \n", "### Human Evaluation\n\n\nHigh percentage of safe responses across five risk areas for six models demonstrates that they are basically safe.\nLLaMA-2 ranks the first, followed by ChatGPT, Claude, GPT-4, and Vicuna, with ChatGLM2 ranking the last by 85 harmful responses among 939.\n\n\n* Harmlessness Rank\n\n\n!harmful\\_num\n\n\n* Action Category Distribution\n* \n\n\n!action\\_distribution", "### Automatic Evaluation\n\n\nLongformer evaluators achieve comparable results with human and GPT-4.\n\n\n* GPT-4 evaluation\n* Fine-tuned Longformer classifier (600M)\n\n\n\nIf our work is useful for your own, you can cite us with the following BibTex entry:\n\n\nLicense\n-------\n\n\nAll datasets in this repository are released under the\nCreative Commons Attribution-NonCommercial-ShareAlike 4.0 International License. All source files in this repository are released under the Apache 2.0 license, the text of which can be found in the LICENSE file." ]
[ 47, 96, 121 ]
[ "passage: TAGS\n#task_categories-text-generation #size_categories-n<1K #language-English #license-apache-2.0 #safety #large language model #region-us \n### Human Evaluation\n\n\nHigh percentage of safe responses across five risk areas for six models demonstrates that they are basically safe.\nLLaMA-2 ranks the first, followed by ChatGPT, Claude, GPT-4, and Vicuna, with ChatGLM2 ranking the last by 85 harmful responses among 939.\n\n\n* Harmlessness Rank\n\n\n!harmful\\_num\n\n\n* Action Category Distribution\n* \n\n\n!action\\_distribution### Automatic Evaluation\n\n\nLongformer evaluators achieve comparable results with human and GPT-4.\n\n\n* GPT-4 evaluation\n* Fine-tuned Longformer classifier (600M)\n\n\n\nIf our work is useful for your own, you can cite us with the following BibTex entry:\n\n\nLicense\n-------\n\n\nAll datasets in this repository are released under the\nCreative Commons Attribution-NonCommercial-ShareAlike 4.0 International License. All source files in this repository are released under the Apache 2.0 license, the text of which can be found in the LICENSE file." ]
c515d6a3b2f3db4bc1650068bb4353cec04f07c6
# Dataset Card for "music_align" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
ryanc/music_align
[ "region:us" ]
2023-08-28T16:54:14+00:00
{"configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}], "dataset_info": {"features": [{"name": "caption", "dtype": "string"}, {"name": "audio", "dtype": "audio"}], "splits": [{"name": "train", "num_bytes": 16132095937.715, "num_examples": 8537}], "download_size": 1862624886, "dataset_size": 16132095937.715}}
2023-08-29T01:50:52+00:00
[]
[]
TAGS #region-us
# Dataset Card for "music_align" More Information needed
[ "# Dataset Card for \"music_align\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"music_align\"\n\nMore Information needed" ]
[ 6, 13 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"music_align\"\n\nMore Information needed" ]
7f06ef5b5462fc2d62805258f2b0b3c0bd80c866
# Dataset Card for "audio_align" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
ryanc/audio_align
[ "region:us" ]
2023-08-28T16:56:45+00:00
{"dataset_info": {"features": [{"name": "caption", "dtype": "string"}, {"name": "audio", "dtype": "audio"}], "splits": [{"name": "train", "num_bytes": 63369072189.92, "num_examples": 38120}], "download_size": 28087027560, "dataset_size": 63369072189.92}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]}
2023-08-29T06:51:23+00:00
[]
[]
TAGS #region-us
# Dataset Card for "audio_align" More Information needed
[ "# Dataset Card for \"audio_align\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"audio_align\"\n\nMore Information needed" ]
[ 6, 14 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"audio_align\"\n\nMore Information needed" ]
228879eeb63a9f2d06e7cfefda50a4d479e0fcfe
# Dataset Card for "llama_2_optimized_product_titles-esci" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
qazisaad/llama_2_optimized_product_titles-esci
[ "region:us" ]
2023-08-28T17:22:54+00:00
{"dataset_info": {"features": [{"name": "index", "dtype": "int64"}, {"name": "product_title", "dtype": "string"}, {"name": "average_score", "dtype": "float64"}, {"name": "total_score", "dtype": "float64"}, {"name": "text", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 6296294, "num_examples": 2199}], "download_size": 987749, "dataset_size": 6296294}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]}
2023-08-29T22:46:25+00:00
[]
[]
TAGS #region-us
# Dataset Card for "llama_2_optimized_product_titles-esci" More Information needed
[ "# Dataset Card for \"llama_2_optimized_product_titles-esci\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"llama_2_optimized_product_titles-esci\"\n\nMore Information needed" ]
[ 6, 25 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"llama_2_optimized_product_titles-esci\"\n\nMore Information needed" ]
ed0ed8685680e8068fe5ca4434dfb504a32591d2
# Summary `aya-telugu-food-recipes` is an open source dataset of instruct-style records generated by webscraping a Telugu food recipes website. This was created as part of [Aya Open Science Initiative](https://sites.google.com/cohere.com/aya-en/home) from Cohere For AI. This dataset can be used for any purpose, whether academic or commercial, under the terms of the [Apache 2.0](https://opensource.org/license/apache-2-0) License. Supported Tasks: - Training LLMs - Synthetic Data Generation - Data Augmentation Languages: Telugu Version: 1.0 # Dataset Overview `aya-telugu-food-recipes` is a corpus of more than 400 records generated by webscraping of the Telugu Food Recipes Website. This Dataset can be used for the following task: - Given the name of the food item, generates the detailed recipe along with the ingredients. # Intended Uses While immediately valuable for instruction fine tuning large language models, as a corpus of instruction prompts, this dataset also presents a valuable opportunity for synthetic data generation in the methods. For example, prompt-completions could be submitted as few-shot examples to a large open language model to generate additional food recipes. # Dataset ## Load with Datasets To load this dataset with Datasets, you'll just need to install Datasets as `pip install datasets --upgrade` and then use the following code: ```python from datasets import load_dataset ds = load_dataset('SuryaKrishna02/aya-telugu-food-recipes') ``` ## Purpose of Collection Telugu is a low-resource language where there no food recipes instruct-style dataset to the best of my knowledge. This was created as a part of [Aya Open Science Initiative](https://sites.google.com/cohere.com/aya-en/home) from Cohere For AI to make sure Telugu is well represented in the space of AI/ML. Unlike other datasets that are limited to non-commercial use, this dataset can be used, modified, and extended for any purpose, including academic or commercial applications. 
## Sources - **Andhrajyothi Website**: Performed webscraping from [Andhrajyothi Website](https://www.andhrajyothy.com/vantalu) which is a famous telugu website consisting of food recipes in the following categories: 1. తీపి వంటలు 2. పచ్చళ్లు 3. శాకాహారం 4. మాంసాహారం - Next, performed some pre-processing of the data like removing unwanted characters, extracting ingredients and cooking instructions seperately from the scraped data. Finally, converted the scraped data into Instruct-style prompts and completions. ## Data Fields - `inputs` : Prompt or input to the language model. - `targets` : Completion or output of the language model. - `template_id` : Id of the template used in `inputs` and `targets`. - `template_lang`: ISO code of the language used in the `inputs` and `targets` where *tel* refers to Telugu. ## Templates For the creation of instruct-style prompts and completions from the scraped data, the following one template category with 7 different templates were used: 1. Given the name of the food item, generates the detailed recipe along with the ingredients. | template_id | inputs | targets | |-------------|--------|---------| | 1 | ```{{Food Item}} ఎలా తయారు చేస్తారో క్లుప్తంగా ఇవ్వండి.``` | ```{{Food Item}} కి కావలసిన పదార్ధాలు మరియు తయారు చేసే విధానం ఇక్కడ ఇవ్వబడింది.\nకావలసిన పదార్థాలు:\n{{Ingredients}}\n\nతయారుచేసే విధానం:\n{{Cooking Instructions}}``` | | 2 | ```మీరు {{Food Item}} తయారు చెయ్యడానికి ఎటువంటి తిండి పదార్ధాలు వాడుతురు మరియు ఏ విధముగా చేస్తారో వివరణ ఇవ్వండి.``` | ```{{Food Item}} కి కావలసిన పదార్ధాలు మరియు తయారు చేసే విధానం ఇక్కడ ఇవ్వబడింది.\nకావలసిన పదార్థాలు:\n{{Ingredients}}\n\nతయారుచేసే విధానం:\n{{Cooking Instructions}}``` | | 3 | ```నేను {{Food Item}} చాలా రుచికరంగా ఉంటుంది అని విన్నాను. 
నాకు ఇది ఎలా చెయ్యాలో సంక్లిప్తంగా చెప్పు.``` | ```{{Food Item}} కి కావలసిన పదార్ధాలు మరియు తయారు చేసే విధానం ఇక్కడ ఇవ్వబడింది.\nకావలసిన పదార్థాలు:\n{{Ingredients}}\n\nతయారుచేసే విధానం:\n{{Cooking Instructions}}``` | | 4 | ```మొదటిసారి వంట చేసేవారికి చెప్పినట్టు నాకు {{Food Item}} ఎలా చెయ్యాలొ చెప్పు``` | ```{{Food Item}} కి కావలసిన పదార్ధాలు మరియు తయారు చేసే విధానం ఇక్కడ ఇవ్వబడింది.\nకావలసిన పదార్థాలు:\n{{Ingredients}}\n\nతయారుచేసే విధానం:\n{{Cooking Instructions}}``` | | 5 | ```{{Food Item}} ఎలా చేయాలి? సమాధానం లో కావలసిన పదార్ధాలు మరియు తయారు చేసే విధానం ఉండాలి.``` | ```{{Food Item}} కి కావలసిన పదార్ధాలు మరియు తయారు చేసే విధానం ఇక్కడ ఇవ్వబడింది.\nకావలసిన పదార్థాలు:\n{{Ingredients}}\n\nతయారుచేసే విధానం:\n{{Cooking Instructions}}``` | | 6 | ```{{Food Item}} ఎలా తయారు చేస్తాం?``` | ```{{Food Item}} కి కావలసిన పదార్ధాలు మరియు తయారు చేసే విధానం ఇక్కడ ఇవ్వబడింది.\nకావలసిన పదార్థాలు:\n{{Ingredients}}\n\nతయారుచేసే విధానం:\n{{Cooking Instructions}}``` | | 7 | ```{{Food Item}} రెసిపీ ఏంటి?``` | ```{{Food Item}} కి కావలసిన పదార్ధాలు మరియు తయారు చేసే విధానం ఇక్కడ ఇవ్వబడింది.\nకావలసిన పదార్థాలు:\n{{Ingredients}}\n\nతయారుచేసే విధానం:\n{{Cooking Instructions}}``` | ## Personal or Sensitive Data This dataset contains public information. To our knowledge, there are no private person’s personal identifiers or sensitive information. ## Language Telugu # Known Limitations - The Dataset is scraped from the food recipes website and the contents of this dataset may reflect the bias. - Although there is utmost care taken to keep the dataset as monolingual, there might be some records that may contain English Language along with Telugu. # Contributors [SuryaKrishna02](https://github.com/SuryaKrishna02) and [Desik98](https://github.com/desik1998)
SuryaKrishna02/aya-telugu-food-recipes
[ "task_categories:text-generation", "task_ids:language-modeling", "annotations_creators:expert-generated", "language_creators:expert-generated", "multilinguality:monolingual", "size_categories:n<1K", "source_datasets:original", "language:te", "license:apache-2.0", "food", "recipes", "region:us" ]
2023-08-28T18:02:43+00:00
{"annotations_creators": ["expert-generated"], "language_creators": ["expert-generated"], "language": ["te"], "license": ["apache-2.0"], "multilinguality": ["monolingual"], "size_categories": ["n<1K"], "source_datasets": ["original"], "task_categories": ["text-generation"], "task_ids": ["language-modeling"], "pretty_name": "Telugu Food Recipes", "tags": ["food", "recipes"]}
2024-01-23T14:40:26+00:00
[]
[ "te" ]
TAGS #task_categories-text-generation #task_ids-language-modeling #annotations_creators-expert-generated #language_creators-expert-generated #multilinguality-monolingual #size_categories-n<1K #source_datasets-original #language-Telugu #license-apache-2.0 #food #recipes #region-us
Summary ======= 'aya-telugu-food-recipes' is an open source dataset of instruct-style records generated by webscraping a Telugu food recipes website. This was created as part of Aya Open Science Initiative from Cohere For AI. This dataset can be used for any purpose, whether academic or commercial, under the terms of the Apache 2.0 License. Supported Tasks: * Training LLMs * Synthetic Data Generation * Data Augmentation Languages: Telugu Version: 1.0 Dataset Overview ================ 'aya-telugu-food-recipes' is a corpus of more than 400 records generated by webscraping of the Telugu Food Recipes Website. This Dataset can be used for the following task: * Given the name of the food item, generates the detailed recipe along with the ingredients. Intended Uses ============= While immediately valuable for instruction fine tuning large language models, as a corpus of instruction prompts, this dataset also presents a valuable opportunity for synthetic data generation in the methods. For example, prompt-completions could be submitted as few-shot examples to a large open language model to generate additional food recipes. Dataset ======= Load with Datasets ------------------ To load this dataset with Datasets, you'll just need to install Datasets as 'pip install datasets --upgrade' and then use the following code: Purpose of Collection --------------------- Telugu is a low-resource language where there no food recipes instruct-style dataset to the best of my knowledge. This was created as a part of Aya Open Science Initiative from Cohere For AI to make sure Telugu is well represented in the space of AI/ML. Unlike other datasets that are limited to non-commercial use, this dataset can be used, modified, and extended for any purpose, including academic or commercial applications. Sources ------- * Andhrajyothi Website: Performed webscraping from Andhrajyothi Website which is a famous telugu website consisting of food recipes in the following categories: 1. తీపి వంటలు 2. 
పచ్చళ్లు 3. శాకాహారం 4. మాంసాహారం * Next, performed some pre-processing of the data like removing unwanted characters, extracting ingredients and cooking instructions seperately from the scraped data. Finally, converted the scraped data into Instruct-style prompts and completions. Data Fields ----------- * 'inputs' : Prompt or input to the language model. * 'targets' : Completion or output of the language model. * 'template\_id' : Id of the template used in 'inputs' and 'targets'. * 'template\_lang': ISO code of the language used in the 'inputs' and 'targets' where *tel* refers to Telugu. Templates --------- For the creation of instruct-style prompts and completions from the scraped data, the following one template category with 7 different templates were used: 1. Given the name of the food item, generates the detailed recipe along with the ingredients. template\_id: 1, inputs: , targets: template\_id: 2, inputs: , targets: template\_id: 3, inputs: , targets: template\_id: 4, inputs: , targets: template\_id: 5, inputs: , targets: template\_id: 6, inputs: , targets: template\_id: 7, inputs: , targets: Personal or Sensitive Data -------------------------- This dataset contains public information. To our knowledge, there are no private person’s personal identifiers or sensitive information. Language -------- Telugu Known Limitations ================= * The Dataset is scraped from the food recipes website and the contents of this dataset may reflect the bias. * Although there is utmost care taken to keep the dataset as monolingual, there might be some records that may contain English Language along with Telugu. Contributors ============ SuryaKrishna02 and Desik98
[]
[ "TAGS\n#task_categories-text-generation #task_ids-language-modeling #annotations_creators-expert-generated #language_creators-expert-generated #multilinguality-monolingual #size_categories-n<1K #source_datasets-original #language-Telugu #license-apache-2.0 #food #recipes #region-us \n" ]
[ 95 ]
[ "passage: TAGS\n#task_categories-text-generation #task_ids-language-modeling #annotations_creators-expert-generated #language_creators-expert-generated #multilinguality-monolingual #size_categories-n<1K #source_datasets-original #language-Telugu #license-apache-2.0 #food #recipes #region-us \n" ]
bda20733839c34d89de2c2985b847e3873a24906
# Dataset Card for "dataset_dyal_darija_T5" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
DRAGOO/dataset_dyal_darija_T5
[ "region:us" ]
2023-08-28T18:06:28+00:00
{"configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "test", "path": "data/test-*"}]}], "dataset_info": {"features": [{"name": "conversation", "dtype": "string"}, {"name": "response", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 83191.2, "num_examples": 72}, {"name": "test", "num_bytes": 20797.8, "num_examples": 18}], "download_size": 72585, "dataset_size": 103989.0}}
2023-08-28T18:07:23+00:00
[]
[]
TAGS #region-us
# Dataset Card for "dataset_dyal_darija_T5" More Information needed
[ "# Dataset Card for \"dataset_dyal_darija_T5\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"dataset_dyal_darija_T5\"\n\nMore Information needed" ]
[ 6, 21 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"dataset_dyal_darija_T5\"\n\nMore Information needed" ]
5535a6bcb581f752af9cc09b9de428211f4c2d5c
# Dataset Card for "code_search_net_python_filtered_top50k" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
bjoernp/code_search_net_python_filtered_top50k
[ "region:us" ]
2023-08-28T18:14:42+00:00
{"dataset_info": {"features": [{"name": "code", "dtype": "string"}, {"name": "signature", "dtype": "string"}, {"name": "docstring", "dtype": "string"}, {"name": "loss_without_docstring", "dtype": "float64"}, {"name": "loss_with_docstring", "dtype": "float64"}, {"name": "factor", "dtype": "float64"}], "splits": [{"name": "train", "num_bytes": 46636060.557325035, "num_examples": 50023}], "download_size": 15599036, "dataset_size": 46636060.557325035}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]}
2023-08-28T18:14:47+00:00
[]
[]
TAGS #region-us
# Dataset Card for "code_search_net_python_filtered_top50k" More Information needed
[ "# Dataset Card for \"code_search_net_python_filtered_top50k\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"code_search_net_python_filtered_top50k\"\n\nMore Information needed" ]
[ 6, 25 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"code_search_net_python_filtered_top50k\"\n\nMore Information needed" ]
7b8bd3dd45f911bd807244048e358356c07feeb7
# Dataset Card for "code_search_net_python_processed_400k" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
bjoernp/code_search_net_python_processed_400k
[ "region:us" ]
2023-08-28T18:21:58+00:00
{"dataset_info": {"features": [{"name": "code", "dtype": "string"}, {"name": "signature", "dtype": "string"}, {"name": "docstring", "dtype": "string"}, {"name": "loss_without_docstring", "dtype": "float64"}, {"name": "loss_with_docstring", "dtype": "float64"}, {"name": "factor", "dtype": "float64"}], "splits": [{"name": "train", "num_bytes": 373144422, "num_examples": 400244}], "download_size": 150980039, "dataset_size": 373144422}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]}
2023-08-28T18:22:06+00:00
[]
[]
TAGS #region-us
# Dataset Card for "code_search_net_python_processed_400k" More Information needed
[ "# Dataset Card for \"code_search_net_python_processed_400k\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"code_search_net_python_processed_400k\"\n\nMore Information needed" ]
[ 6, 24 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"code_search_net_python_processed_400k\"\n\nMore Information needed" ]
18f799ab7c4da46d5043ed05f3774e4a5fac08f3
# Dataset Card for "code_search_net_filtered_top100" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
bjoernp/code_search_net_filtered_top100
[ "region:us" ]
2023-08-28T18:24:22+00:00
{"dataset_info": {"features": [{"name": "code", "dtype": "string"}, {"name": "signature", "dtype": "string"}, {"name": "docstring", "dtype": "string"}, {"name": "loss_without_docstring", "dtype": "float64"}, {"name": "loss_with_docstring", "dtype": "float64"}, {"name": "factor", "dtype": "float64"}, {"name": "__index_level_0__", "dtype": "int64"}], "splits": [{"name": "train", "num_bytes": 166530, "num_examples": 100}], "download_size": 70792, "dataset_size": 166530}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]}
2023-08-28T18:24:24+00:00
[]
[]
TAGS #region-us
# Dataset Card for "code_search_net_filtered_top100" More Information needed
[ "# Dataset Card for \"code_search_net_filtered_top100\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"code_search_net_filtered_top100\"\n\nMore Information needed" ]
[ 6, 21 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"code_search_net_filtered_top100\"\n\nMore Information needed" ]
a4ccc772aaa35b10b1987697d41b2354437d5726
# OpenMP Question Answering Dataset OpenMP Question Answering Dataset is a new OpenMP question answering introduced in paper "LM4HPC: Towards Effective Language Model Application in High-Performance Computing". It is designed to probe the capabilities of language models in single-turn interactions with users. Similar to other QA datasets, we include some request-response pairs which are not strictly question-answering pairs. The categories and examples of questions in the OMPQA dataset can be found in below table. | **Category** | **Count** | **Example Questions** | | ------------ | --------- | --------------------- | | Basics | 40 | What is a worksharing construct in OpenMP? | | Examples | 20 | Give an example OpenMP C code for computing PI using numerical integration. | | Compilers | 24 | In what language is LLVM written? <br> How is a parallel region represented in Clang? | | Benchmarks | 23 | What are the NAS Parallel benchmarks? <br> Which benchmark assesses data race detection tools? | # Data Usage The dataset is provided in a CSV file, with each entry in the CSV table representing a pair of question and answer. # Contribute Welcome to join us and become a contributor to this project! If you want to share some datasets, put them in csv file and email to lechen AT iastate.edu. Thank you! # Citation If you use the data collection, code, or experimental findings in this repository, please cite our IWOMP paper: @article{chen2023lm4hpc, title={LM4HPC: Towards Effective Language Model Application in High-Performance Computing}, author={Chen, Le and Lin, Pei-Hung and Vanderbruggen, Tristan and Liao, Chunhua and Emani, Murali and de Supinski, Bronis}, journal={arXiv preprint arXiv:2306.14979}, year={2023} }
chenle015/OpenMP_Question_Answering
[ "license:bsd", "region:us" ]
2023-08-28T18:31:47+00:00
{"license": "bsd"}
2023-08-29T15:19:22+00:00
[]
[]
TAGS #license-bsd #region-us
OpenMP Question Answering Dataset ================================= OpenMP Question Answering Dataset is a new OpenMP question answering introduced in paper "LM4HPC: Towards Effective Language Model Application in High-Performance Computing". It is designed to probe the capabilities of language models in single-turn interactions with users. Similar to other QA datasets, we include some request-response pairs which are not strictly question-answering pairs. The categories and examples of questions in the OMPQA dataset can be found in below table. Category: Basics, Count: 40, Example Questions: What is a worksharing construct in OpenMP? Category: Examples, Count: 20, Example Questions: Give an example OpenMP C code for computing PI using numerical integration. Category: Compilers, Count: 24, Example Questions: In what language is LLVM written? How is a parallel region represented in Clang? Category: Benchmarks, Count: 23, Example Questions: What are the NAS Parallel benchmarks? Which benchmark assesses data race detection tools? Data Usage ========== The dataset is provided in a CSV file, with each entry in the CSV table representing a pair of question and answer. Contribute ========== Welcome to join us and become a contributor to this project! If you want to share some datasets, put them in csv file and email to lechen AT URL. Thank you! If you use the data collection, code, or experimental findings in this repository, please cite our IWOMP paper: @article{chen2023lm4hpc, title={LM4HPC: Towards Effective Language Model Application in High-Performance Computing}, author={Chen, Le and Lin, Pei-Hung and Vanderbruggen, Tristan and Liao, Chunhua and Emani, Murali and de Supinski, Bronis}, journal={arXiv preprint arXiv:2306.14979}, year={2023} }
[]
[ "TAGS\n#license-bsd #region-us \n" ]
[ 12 ]
[ "passage: TAGS\n#license-bsd #region-us \n" ]
c7b18168b7017e16b81288a2d75bf21619feaae7
# Dataset Card for "AO3_fandom_chatbot" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
ebony59/AO3_fandom_chatbot
[ "region:us" ]
2023-08-28T18:46:24+00:00
{"dataset_info": {"features": [{"name": "conversation", "list": [{"name": "content", "dtype": "string"}, {"name": "do_train", "dtype": "bool"}, {"name": "role", "dtype": "string"}]}], "splits": [{"name": "train", "num_bytes": 2269557, "num_examples": 1036}], "download_size": 1161469, "dataset_size": 2269557}}
2023-09-28T14:59:55+00:00
[]
[]
TAGS #region-us
# Dataset Card for "AO3_fandom_chatbot" More Information needed
[ "# Dataset Card for \"AO3_fandom_chatbot\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"AO3_fandom_chatbot\"\n\nMore Information needed" ]
[ 6, 18 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"AO3_fandom_chatbot\"\n\nMore Information needed" ]
06631cbb390fd2801cbfe19c2a0df562a1938074
# Dataset Card for Evaluation run of jondurbin/airoboros-l2-70b-2.1 ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/jondurbin/airoboros-l2-70b-2.1 - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [jondurbin/airoboros-l2-70b-2.1](https://huggingface.co/jondurbin/airoboros-l2-70b-2.1) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 64 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_jondurbin__airoboros-l2-70b-2.1", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-10-22T02:46:34.884332](https://huggingface.co/datasets/open-llm-leaderboard/details_jondurbin__airoboros-l2-70b-2.1/blob/main/results_2023-10-22T02-46-34.884332.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. 
You find each in the results and the "latest" split for each eval): ```python { "all": { "em": 0.4175755033557047, "em_stderr": 0.005050413933234051, "f1": 0.5395018875838982, "f1_stderr": 0.004589160014706508, "acc": 0.5122650377908212, "acc_stderr": 0.011071703465026875 }, "harness|drop|3": { "em": 0.4175755033557047, "em_stderr": 0.005050413933234051, "f1": 0.5395018875838982, "f1_stderr": 0.004589160014706508 }, "harness|gsm8k|5": { "acc": 0.2100075815011372, "acc_stderr": 0.011219441626913245 }, "harness|winogrande|5": { "acc": 0.8145224940805051, "acc_stderr": 0.010923965303140505 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
open-llm-leaderboard/details_jondurbin__airoboros-l2-70b-2.1
[ "region:us" ]
2023-08-28T18:48:12+00:00
{"pretty_name": "Evaluation run of jondurbin/airoboros-l2-70b-2.1", "dataset_summary": "Dataset automatically created during the evaluation run of model [jondurbin/airoboros-l2-70b-2.1](https://huggingface.co/jondurbin/airoboros-l2-70b-2.1) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_jondurbin__airoboros-l2-70b-2.1\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-10-22T02:46:34.884332](https://huggingface.co/datasets/open-llm-leaderboard/details_jondurbin__airoboros-l2-70b-2.1/blob/main/results_2023-10-22T02-46-34.884332.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. 
You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.4175755033557047,\n \"em_stderr\": 0.005050413933234051,\n \"f1\": 0.5395018875838982,\n \"f1_stderr\": 0.004589160014706508,\n \"acc\": 0.5122650377908212,\n \"acc_stderr\": 0.011071703465026875\n },\n \"harness|drop|3\": {\n \"em\": 0.4175755033557047,\n \"em_stderr\": 0.005050413933234051,\n \"f1\": 0.5395018875838982,\n \"f1_stderr\": 0.004589160014706508\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.2100075815011372,\n \"acc_stderr\": 0.011219441626913245\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.8145224940805051,\n \"acc_stderr\": 0.010923965303140505\n }\n}\n```", "repo_url": "https://huggingface.co/jondurbin/airoboros-l2-70b-2.1", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_08_31T13_20_37.537573", "path": ["**/details_harness|arc:challenge|25_2023-08-31T13:20:37.537573.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-08-31T13:20:37.537573.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_10_22T02_46_34.884332", "path": ["**/details_harness|drop|3_2023-10-22T02-46-34.884332.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-10-22T02-46-34.884332.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_10_22T02_46_34.884332", "path": ["**/details_harness|gsm8k|5_2023-10-22T02-46-34.884332.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-10-22T02-46-34.884332.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_08_31T13_20_37.537573", "path": ["**/details_harness|hellaswag|10_2023-08-31T13:20:37.537573.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-08-31T13:20:37.537573.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_08_31T13_20_37.537573", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-31T13:20:37.537573.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-31T13:20:37.537573.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-31T13:20:37.537573.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-31T13:20:37.537573.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-31T13:20:37.537573.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-31T13:20:37.537573.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-31T13:20:37.537573.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-31T13:20:37.537573.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-31T13:20:37.537573.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-31T13:20:37.537573.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-31T13:20:37.537573.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-31T13:20:37.537573.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-31T13:20:37.537573.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-31T13:20:37.537573.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-31T13:20:37.537573.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-31T13:20:37.537573.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-31T13:20:37.537573.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-31T13:20:37.537573.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-08-31T13:20:37.537573.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-31T13:20:37.537573.parquet", 
"**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-31T13:20:37.537573.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-31T13:20:37.537573.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-31T13:20:37.537573.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-31T13:20:37.537573.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-31T13:20:37.537573.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-31T13:20:37.537573.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-31T13:20:37.537573.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-31T13:20:37.537573.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-31T13:20:37.537573.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-31T13:20:37.537573.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-31T13:20:37.537573.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-31T13:20:37.537573.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-31T13:20:37.537573.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-31T13:20:37.537573.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-31T13:20:37.537573.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-31T13:20:37.537573.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-31T13:20:37.537573.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-08-31T13:20:37.537573.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-31T13:20:37.537573.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-31T13:20:37.537573.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-31T13:20:37.537573.parquet", 
"**/details_harness|hendrycksTest-miscellaneous|5_2023-08-31T13:20:37.537573.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-31T13:20:37.537573.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-31T13:20:37.537573.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-31T13:20:37.537573.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-31T13:20:37.537573.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-31T13:20:37.537573.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-31T13:20:37.537573.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-31T13:20:37.537573.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-31T13:20:37.537573.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-31T13:20:37.537573.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-31T13:20:37.537573.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-31T13:20:37.537573.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-31T13:20:37.537573.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-31T13:20:37.537573.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-31T13:20:37.537573.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-08-31T13:20:37.537573.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-31T13:20:37.537573.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-31T13:20:37.537573.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-31T13:20:37.537573.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-31T13:20:37.537573.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-31T13:20:37.537573.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-31T13:20:37.537573.parquet", 
"**/details_harness|hendrycksTest-college_chemistry|5_2023-08-31T13:20:37.537573.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-31T13:20:37.537573.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-31T13:20:37.537573.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-31T13:20:37.537573.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-31T13:20:37.537573.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-31T13:20:37.537573.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-31T13:20:37.537573.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-31T13:20:37.537573.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-31T13:20:37.537573.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-31T13:20:37.537573.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-31T13:20:37.537573.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-31T13:20:37.537573.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-08-31T13:20:37.537573.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-31T13:20:37.537573.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-31T13:20:37.537573.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-31T13:20:37.537573.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-31T13:20:37.537573.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-31T13:20:37.537573.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-31T13:20:37.537573.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-31T13:20:37.537573.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-31T13:20:37.537573.parquet", 
"**/details_harness|hendrycksTest-high_school_physics|5_2023-08-31T13:20:37.537573.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-31T13:20:37.537573.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-31T13:20:37.537573.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-31T13:20:37.537573.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-31T13:20:37.537573.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-31T13:20:37.537573.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-31T13:20:37.537573.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-31T13:20:37.537573.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-31T13:20:37.537573.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-31T13:20:37.537573.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-08-31T13:20:37.537573.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-31T13:20:37.537573.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-31T13:20:37.537573.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-31T13:20:37.537573.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-31T13:20:37.537573.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-31T13:20:37.537573.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-31T13:20:37.537573.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-31T13:20:37.537573.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-31T13:20:37.537573.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-31T13:20:37.537573.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-31T13:20:37.537573.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-31T13:20:37.537573.parquet", 
"**/details_harness|hendrycksTest-professional_medicine|5_2023-08-31T13:20:37.537573.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-31T13:20:37.537573.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-31T13:20:37.537573.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-31T13:20:37.537573.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-31T13:20:37.537573.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-31T13:20:37.537573.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-31T13:20:37.537573.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-08-31T13:20:37.537573.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_08_31T13_20_37.537573", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-31T13:20:37.537573.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-31T13:20:37.537573.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_08_31T13_20_37.537573", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-31T13:20:37.537573.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-31T13:20:37.537573.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_08_31T13_20_37.537573", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-31T13:20:37.537573.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-31T13:20:37.537573.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_08_31T13_20_37.537573", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-08-31T13:20:37.537573.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-business_ethics|5_2023-08-31T13:20:37.537573.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_08_31T13_20_37.537573", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-31T13:20:37.537573.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-31T13:20:37.537573.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_08_31T13_20_37.537573", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-31T13:20:37.537573.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-31T13:20:37.537573.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_08_31T13_20_37.537573", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-31T13:20:37.537573.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-31T13:20:37.537573.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_08_31T13_20_37.537573", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-31T13:20:37.537573.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-31T13:20:37.537573.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_08_31T13_20_37.537573", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-31T13:20:37.537573.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-31T13:20:37.537573.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_08_31T13_20_37.537573", "path": 
["**/details_harness|hendrycksTest-college_medicine|5_2023-08-31T13:20:37.537573.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-08-31T13:20:37.537573.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_08_31T13_20_37.537573", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-08-31T13:20:37.537573.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-08-31T13:20:37.537573.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_08_31T13_20_37.537573", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-31T13:20:37.537573.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-31T13:20:37.537573.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_08_31T13_20_37.537573", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-31T13:20:37.537573.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-31T13:20:37.537573.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_08_31T13_20_37.537573", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-31T13:20:37.537573.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-31T13:20:37.537573.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_08_31T13_20_37.537573", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-31T13:20:37.537573.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-31T13:20:37.537573.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", 
"data_files": [{"split": "2023_08_31T13_20_37.537573", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-31T13:20:37.537573.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-31T13:20:37.537573.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_08_31T13_20_37.537573", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-31T13:20:37.537573.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-31T13:20:37.537573.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_08_31T13_20_37.537573", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-31T13:20:37.537573.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-31T13:20:37.537573.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_08_31T13_20_37.537573", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-31T13:20:37.537573.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-31T13:20:37.537573.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_08_31T13_20_37.537573", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-31T13:20:37.537573.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-31T13:20:37.537573.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_08_31T13_20_37.537573", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-31T13:20:37.537573.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-31T13:20:37.537573.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_08_31T13_20_37.537573", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-31T13:20:37.537573.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-31T13:20:37.537573.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_08_31T13_20_37.537573", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-31T13:20:37.537573.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-31T13:20:37.537573.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_08_31T13_20_37.537573", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-31T13:20:37.537573.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-31T13:20:37.537573.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_08_31T13_20_37.537573", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-31T13:20:37.537573.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-31T13:20:37.537573.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_08_31T13_20_37.537573", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-31T13:20:37.537573.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-31T13:20:37.537573.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_08_31T13_20_37.537573", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-31T13:20:37.537573.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-31T13:20:37.537573.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_08_31T13_20_37.537573", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-31T13:20:37.537573.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-31T13:20:37.537573.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_08_31T13_20_37.537573", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-31T13:20:37.537573.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-31T13:20:37.537573.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_08_31T13_20_37.537573", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-31T13:20:37.537573.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-31T13:20:37.537573.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_08_31T13_20_37.537573", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-31T13:20:37.537573.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-31T13:20:37.537573.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_08_31T13_20_37.537573", "path": 
["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-31T13:20:37.537573.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-31T13:20:37.537573.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_08_31T13_20_37.537573", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-31T13:20:37.537573.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-31T13:20:37.537573.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_08_31T13_20_37.537573", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-31T13:20:37.537573.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-31T13:20:37.537573.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_08_31T13_20_37.537573", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-31T13:20:37.537573.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-31T13:20:37.537573.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_08_31T13_20_37.537573", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-31T13:20:37.537573.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-31T13:20:37.537573.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_08_31T13_20_37.537573", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-31T13:20:37.537573.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-31T13:20:37.537573.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": 
"2023_08_31T13_20_37.537573", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-31T13:20:37.537573.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-31T13:20:37.537573.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_08_31T13_20_37.537573", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-31T13:20:37.537573.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-31T13:20:37.537573.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_08_31T13_20_37.537573", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-31T13:20:37.537573.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-31T13:20:37.537573.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_08_31T13_20_37.537573", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-31T13:20:37.537573.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-31T13:20:37.537573.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_08_31T13_20_37.537573", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-31T13:20:37.537573.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-31T13:20:37.537573.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_08_31T13_20_37.537573", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-31T13:20:37.537573.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-31T13:20:37.537573.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": 
"2023_08_31T13_20_37.537573", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-31T13:20:37.537573.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-31T13:20:37.537573.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_08_31T13_20_37.537573", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-31T13:20:37.537573.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-31T13:20:37.537573.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_08_31T13_20_37.537573", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-31T13:20:37.537573.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-31T13:20:37.537573.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_08_31T13_20_37.537573", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-31T13:20:37.537573.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-31T13:20:37.537573.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_08_31T13_20_37.537573", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-31T13:20:37.537573.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-31T13:20:37.537573.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_08_31T13_20_37.537573", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-31T13:20:37.537573.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-31T13:20:37.537573.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": 
[{"split": "2023_08_31T13_20_37.537573", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-31T13:20:37.537573.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-31T13:20:37.537573.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_08_31T13_20_37.537573", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-31T13:20:37.537573.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-31T13:20:37.537573.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_08_31T13_20_37.537573", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-31T13:20:37.537573.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-31T13:20:37.537573.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_08_31T13_20_37.537573", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-31T13:20:37.537573.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-31T13:20:37.537573.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_08_31T13_20_37.537573", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-31T13:20:37.537573.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-31T13:20:37.537573.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_08_31T13_20_37.537573", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-31T13:20:37.537573.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-31T13:20:37.537573.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_08_31T13_20_37.537573", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-31T13:20:37.537573.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-31T13:20:37.537573.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_08_31T13_20_37.537573", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-31T13:20:37.537573.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-31T13:20:37.537573.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_08_31T13_20_37.537573", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-31T13:20:37.537573.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-31T13:20:37.537573.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_10_22T02_46_34.884332", "path": ["**/details_harness|winogrande|5_2023-10-22T02-46-34.884332.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-10-22T02-46-34.884332.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_08_31T13_20_37.537573", "path": ["results_2023-08-31T13:20:37.537573.parquet"]}, {"split": "2023_10_22T02_46_34.884332", "path": ["results_2023-10-22T02-46-34.884332.parquet"]}, {"split": "latest", "path": ["results_2023-10-22T02-46-34.884332.parquet"]}]}]}
2023-10-22T01:46:46+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of jondurbin/airoboros-l2-70b-2.1 ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model jondurbin/airoboros-l2-70b-2.1 on the Open LLM Leaderboard. The dataset is composed of 64 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-10-22T02:46:34.884332(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
[ "# Dataset Card for Evaluation run of jondurbin/airoboros-l2-70b-2.1", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model jondurbin/airoboros-l2-70b-2.1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-22T02:46:34.884332(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of jondurbin/airoboros-l2-70b-2.1", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model jondurbin/airoboros-l2-70b-2.1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-22T02:46:34.884332(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 23, 31, 171, 66, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of jondurbin/airoboros-l2-70b-2.1## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model jondurbin/airoboros-l2-70b-2.1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-10-22T02:46:34.884332(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
9afead128d8979e3ed82fafeb1252425b8f063ab
# Dataset Card for "kullm-v2_ppl" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
nayohan/kullm-v2_ppl
[ "region:us" ]
2023-08-28T18:54:45+00:00
{"dataset_info": {"features": [{"name": "id", "dtype": "string"}, {"name": "instruction", "dtype": "string"}, {"name": "input", "dtype": "string"}, {"name": "output", "dtype": "string"}, {"name": "ppl", "dtype": "float64"}, {"name": "len", "dtype": "int64"}], "splits": [{"name": "train", "num_bytes": 215898169, "num_examples": 144069}], "download_size": 108642849, "dataset_size": 215898169}}
2023-08-28T18:55:39+00:00
[]
[]
TAGS #region-us
# Dataset Card for "kullm-v2_ppl" More Information needed
[ "# Dataset Card for \"kullm-v2_ppl\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"kullm-v2_ppl\"\n\nMore Information needed" ]
[ 6, 18 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"kullm-v2_ppl\"\n\nMore Information needed" ]
fed8fb809643a7c1e29a6b7993d8c5e2817462f5
**NOTE** This dataset is under development. To load the dataset and prepare it for DPO training. ```python from datasets import load_dataset ds = load_dataset("plaguss/the_office_ds_dwight_qa") def return_prompt_and_responses(samples) -> dict[str, str]: return { "prompt": "Question: " + samples["prompt"] + "\n\nAnswer: ", "chosen": samples[samples["label"]], "rejected": samples["response_2" if samples["label"] == "response_1" else "response_1"], } rm_columns = ["response_1", "response_2", "is_dwight"] ds = ds.map( return_prompt_and_responses, remove_columns=rm_columns ) ```
plaguss/the_office_ds_dwight_qa
[ "task_categories:conversational", "size_categories:1K<n<10K", "language:en", "license:mit", "art", "region:us" ]
2023-08-28T19:07:17+00:00
{"language": ["en"], "license": "mit", "size_categories": ["1K<n<10K"], "task_categories": ["conversational"], "tags": ["art"]}
2023-08-29T12:51:14+00:00
[]
[ "en" ]
TAGS #task_categories-conversational #size_categories-1K<n<10K #language-English #license-mit #art #region-us
NOTE This dataset is under development. To load the dataset and prepare it for DPO training.
[]
[ "TAGS\n#task_categories-conversational #size_categories-1K<n<10K #language-English #license-mit #art #region-us \n" ]
[ 39 ]
[ "passage: TAGS\n#task_categories-conversational #size_categories-1K<n<10K #language-English #license-mit #art #region-us \n" ]
4cf7989761b2b14946f7055d8d4b682c1ba703c2
# Dataset Card for "kdom_classification" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
daveK91/kdom_classification
[ "region:us" ]
2023-08-28T19:19:44+00:00
{"configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "test", "path": "data/test-*"}, {"split": "validation", "path": "data/validation-*"}]}], "dataset_info": {"features": [{"name": "pixel_values", "sequence": {"sequence": {"sequence": {"sequence": "float32"}}}}, {"name": "labels", "dtype": {"class_label": {"names": {"0": "Anderes", "1": "Dreikoenigenschrein", "2": "Gerokreuz"}}}}], "splits": [{"name": "train", "num_bytes": 90724200.0, "num_examples": 150}, {"name": "test", "num_bytes": 19354496.0, "num_examples": 32}, {"name": "validation", "num_bytes": 16330356.0, "num_examples": 27}], "download_size": 28668560, "dataset_size": 126409052.0}}
2023-08-28T19:20:18+00:00
[]
[]
TAGS #region-us
# Dataset Card for "kdom_classification" More Information needed
[ "# Dataset Card for \"kdom_classification\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"kdom_classification\"\n\nMore Information needed" ]
[ 6, 15 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"kdom_classification\"\n\nMore Information needed" ]
76fec79c4fa6a3074b6e12777de5f20351fca314
# Dataset Card for "Text-to-sql_label" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
Mohanrajv27/Text-to-sql_label
[ "region:us" ]
2023-08-28T19:26:36+00:00
{"configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "test", "path": "data/test-*"}]}], "dataset_info": {"features": [{"name": "instruction", "dtype": "string"}, {"name": "response", "dtype": "string"}, {"name": "input_ids", "sequence": "int32"}, {"name": "attention_mask", "sequence": "int8"}, {"name": "labels", "sequence": "int64"}], "splits": [{"name": "train", "num_bytes": 247128590.95745364, "num_examples": 235987}, {"name": "test", "num_bytes": 27458965.042546377, "num_examples": 26221}], "download_size": 93816148, "dataset_size": 274587556.0}}
2023-08-28T21:38:43+00:00
[]
[]
TAGS #region-us
# Dataset Card for "Text-to-sql_label" More Information needed
[ "# Dataset Card for \"Text-to-sql_label\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"Text-to-sql_label\"\n\nMore Information needed" ]
[ 6, 18 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"Text-to-sql_label\"\n\nMore Information needed" ]
ed55853e009b842dc3adb44525dc8cb9ade631a1
# Dataset Card for Evaluation run of zarakiquemparte/zaraxls-l2-7b ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/zarakiquemparte/zaraxls-l2-7b - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [zarakiquemparte/zaraxls-l2-7b](https://huggingface.co/zarakiquemparte/zaraxls-l2-7b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 64 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_zarakiquemparte__zaraxls-l2-7b", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-09-18T09:59:01.594012](https://huggingface.co/datasets/open-llm-leaderboard/details_zarakiquemparte__zaraxls-l2-7b/blob/main/results_2023-09-18T09-59-01.594012.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. 
You find each in the results and the "latest" split for each eval): ```python { "all": { "em": 0.2003984899328859, "em_stderr": 0.0040994319015717805, "f1": 0.3159343540268478, "f1_stderr": 0.004169444956344296, "acc": 0.36696200812243857, "acc_stderr": 0.006882749087214294 }, "harness|drop|3": { "em": 0.2003984899328859, "em_stderr": 0.0040994319015717805, "f1": 0.3159343540268478, "f1_stderr": 0.004169444956344296 }, "harness|gsm8k|5": { "acc": 0.002274450341167551, "acc_stderr": 0.0013121578148673923 }, "harness|winogrande|5": { "acc": 0.7316495659037096, "acc_stderr": 0.012453340359561195 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
open-llm-leaderboard/details_zarakiquemparte__zaraxls-l2-7b
[ "region:us" ]
2023-08-28T19:29:23+00:00
{"pretty_name": "Evaluation run of zarakiquemparte/zaraxls-l2-7b", "dataset_summary": "Dataset automatically created during the evaluation run of model [zarakiquemparte/zaraxls-l2-7b](https://huggingface.co/zarakiquemparte/zaraxls-l2-7b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_zarakiquemparte__zaraxls-l2-7b\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-09-18T09:59:01.594012](https://huggingface.co/datasets/open-llm-leaderboard/details_zarakiquemparte__zaraxls-l2-7b/blob/main/results_2023-09-18T09-59-01.594012.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. 
You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.2003984899328859,\n \"em_stderr\": 0.0040994319015717805,\n \"f1\": 0.3159343540268478,\n \"f1_stderr\": 0.004169444956344296,\n \"acc\": 0.36696200812243857,\n \"acc_stderr\": 0.006882749087214294\n },\n \"harness|drop|3\": {\n \"em\": 0.2003984899328859,\n \"em_stderr\": 0.0040994319015717805,\n \"f1\": 0.3159343540268478,\n \"f1_stderr\": 0.004169444956344296\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.002274450341167551,\n \"acc_stderr\": 0.0013121578148673923\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7316495659037096,\n \"acc_stderr\": 0.012453340359561195\n }\n}\n```", "repo_url": "https://huggingface.co/zarakiquemparte/zaraxls-l2-7b", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_08_28T20_28_21.792080", "path": ["**/details_harness|arc:challenge|25_2023-08-28T20:28:21.792080.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-08-28T20:28:21.792080.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_09_18T09_59_01.594012", "path": ["**/details_harness|drop|3_2023-09-18T09-59-01.594012.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-09-18T09-59-01.594012.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_09_18T09_59_01.594012", "path": ["**/details_harness|gsm8k|5_2023-09-18T09-59-01.594012.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-09-18T09-59-01.594012.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_08_28T20_28_21.792080", "path": ["**/details_harness|hellaswag|10_2023-08-28T20:28:21.792080.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hellaswag|10_2023-08-28T20:28:21.792080.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_08_28T20_28_21.792080", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-28T20:28:21.792080.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-28T20:28:21.792080.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-28T20:28:21.792080.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-28T20:28:21.792080.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-28T20:28:21.792080.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-28T20:28:21.792080.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-28T20:28:21.792080.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-28T20:28:21.792080.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-28T20:28:21.792080.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-28T20:28:21.792080.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-28T20:28:21.792080.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-28T20:28:21.792080.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-28T20:28:21.792080.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-28T20:28:21.792080.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-28T20:28:21.792080.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-28T20:28:21.792080.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-28T20:28:21.792080.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-28T20:28:21.792080.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-08-28T20:28:21.792080.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-28T20:28:21.792080.parquet", 
"**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-28T20:28:21.792080.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-28T20:28:21.792080.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-28T20:28:21.792080.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-28T20:28:21.792080.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-28T20:28:21.792080.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-28T20:28:21.792080.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-28T20:28:21.792080.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-28T20:28:21.792080.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-28T20:28:21.792080.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-28T20:28:21.792080.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-28T20:28:21.792080.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-28T20:28:21.792080.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-28T20:28:21.792080.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-28T20:28:21.792080.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-28T20:28:21.792080.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-28T20:28:21.792080.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-28T20:28:21.792080.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-08-28T20:28:21.792080.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-28T20:28:21.792080.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-28T20:28:21.792080.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-28T20:28:21.792080.parquet", 
"**/details_harness|hendrycksTest-miscellaneous|5_2023-08-28T20:28:21.792080.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-28T20:28:21.792080.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-28T20:28:21.792080.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-28T20:28:21.792080.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-28T20:28:21.792080.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-28T20:28:21.792080.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-28T20:28:21.792080.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-28T20:28:21.792080.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-28T20:28:21.792080.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-28T20:28:21.792080.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-28T20:28:21.792080.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-28T20:28:21.792080.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-28T20:28:21.792080.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-28T20:28:21.792080.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-28T20:28:21.792080.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-08-28T20:28:21.792080.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-28T20:28:21.792080.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-28T20:28:21.792080.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-28T20:28:21.792080.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-28T20:28:21.792080.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-28T20:28:21.792080.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-28T20:28:21.792080.parquet", 
"**/details_harness|hendrycksTest-college_chemistry|5_2023-08-28T20:28:21.792080.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-28T20:28:21.792080.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-28T20:28:21.792080.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-28T20:28:21.792080.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-28T20:28:21.792080.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-28T20:28:21.792080.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-28T20:28:21.792080.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-28T20:28:21.792080.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-28T20:28:21.792080.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-28T20:28:21.792080.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-28T20:28:21.792080.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-28T20:28:21.792080.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-08-28T20:28:21.792080.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-28T20:28:21.792080.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-28T20:28:21.792080.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-28T20:28:21.792080.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-28T20:28:21.792080.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-28T20:28:21.792080.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-28T20:28:21.792080.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-28T20:28:21.792080.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-28T20:28:21.792080.parquet", 
"**/details_harness|hendrycksTest-high_school_physics|5_2023-08-28T20:28:21.792080.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-28T20:28:21.792080.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-28T20:28:21.792080.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-28T20:28:21.792080.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-28T20:28:21.792080.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-28T20:28:21.792080.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-28T20:28:21.792080.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-28T20:28:21.792080.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-28T20:28:21.792080.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-28T20:28:21.792080.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-08-28T20:28:21.792080.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-28T20:28:21.792080.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-28T20:28:21.792080.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-28T20:28:21.792080.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-28T20:28:21.792080.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-28T20:28:21.792080.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-28T20:28:21.792080.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-28T20:28:21.792080.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-28T20:28:21.792080.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-28T20:28:21.792080.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-28T20:28:21.792080.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-28T20:28:21.792080.parquet", 
"**/details_harness|hendrycksTest-professional_medicine|5_2023-08-28T20:28:21.792080.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-28T20:28:21.792080.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-28T20:28:21.792080.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-28T20:28:21.792080.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-28T20:28:21.792080.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-28T20:28:21.792080.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-28T20:28:21.792080.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-08-28T20:28:21.792080.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_08_28T20_28_21.792080", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-28T20:28:21.792080.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-28T20:28:21.792080.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_08_28T20_28_21.792080", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-28T20:28:21.792080.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-28T20:28:21.792080.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_08_28T20_28_21.792080", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-28T20:28:21.792080.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-28T20:28:21.792080.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_08_28T20_28_21.792080", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-08-28T20:28:21.792080.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-business_ethics|5_2023-08-28T20:28:21.792080.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_08_28T20_28_21.792080", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-28T20:28:21.792080.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-28T20:28:21.792080.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_08_28T20_28_21.792080", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-28T20:28:21.792080.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-28T20:28:21.792080.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_08_28T20_28_21.792080", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-28T20:28:21.792080.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-28T20:28:21.792080.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_08_28T20_28_21.792080", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-28T20:28:21.792080.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-28T20:28:21.792080.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_08_28T20_28_21.792080", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-28T20:28:21.792080.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-28T20:28:21.792080.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_08_28T20_28_21.792080", "path": 
["**/details_harness|hendrycksTest-college_medicine|5_2023-08-28T20:28:21.792080.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-08-28T20:28:21.792080.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_08_28T20_28_21.792080", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-08-28T20:28:21.792080.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-08-28T20:28:21.792080.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_08_28T20_28_21.792080", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-28T20:28:21.792080.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-28T20:28:21.792080.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_08_28T20_28_21.792080", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-28T20:28:21.792080.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-28T20:28:21.792080.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_08_28T20_28_21.792080", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-28T20:28:21.792080.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-28T20:28:21.792080.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_08_28T20_28_21.792080", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-28T20:28:21.792080.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-28T20:28:21.792080.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", 
"data_files": [{"split": "2023_08_28T20_28_21.792080", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-28T20:28:21.792080.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-28T20:28:21.792080.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_08_28T20_28_21.792080", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-28T20:28:21.792080.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-28T20:28:21.792080.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_08_28T20_28_21.792080", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-28T20:28:21.792080.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-28T20:28:21.792080.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_08_28T20_28_21.792080", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-28T20:28:21.792080.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-28T20:28:21.792080.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_08_28T20_28_21.792080", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-28T20:28:21.792080.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-28T20:28:21.792080.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_08_28T20_28_21.792080", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-28T20:28:21.792080.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-28T20:28:21.792080.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_08_28T20_28_21.792080", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-28T20:28:21.792080.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-28T20:28:21.792080.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_08_28T20_28_21.792080", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-28T20:28:21.792080.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-28T20:28:21.792080.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_08_28T20_28_21.792080", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-28T20:28:21.792080.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-28T20:28:21.792080.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_08_28T20_28_21.792080", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-28T20:28:21.792080.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-28T20:28:21.792080.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_08_28T20_28_21.792080", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-28T20:28:21.792080.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-28T20:28:21.792080.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_08_28T20_28_21.792080", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-28T20:28:21.792080.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-28T20:28:21.792080.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_08_28T20_28_21.792080", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-28T20:28:21.792080.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-28T20:28:21.792080.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_08_28T20_28_21.792080", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-28T20:28:21.792080.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-28T20:28:21.792080.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_08_28T20_28_21.792080", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-28T20:28:21.792080.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-28T20:28:21.792080.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_08_28T20_28_21.792080", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-28T20:28:21.792080.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-28T20:28:21.792080.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_08_28T20_28_21.792080", "path": 
["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-28T20:28:21.792080.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-28T20:28:21.792080.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_08_28T20_28_21.792080", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-28T20:28:21.792080.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-28T20:28:21.792080.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_08_28T20_28_21.792080", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-28T20:28:21.792080.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-28T20:28:21.792080.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_08_28T20_28_21.792080", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-28T20:28:21.792080.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-28T20:28:21.792080.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_08_28T20_28_21.792080", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-28T20:28:21.792080.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-28T20:28:21.792080.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_08_28T20_28_21.792080", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-28T20:28:21.792080.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-28T20:28:21.792080.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": 
"2023_08_28T20_28_21.792080", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-28T20:28:21.792080.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-28T20:28:21.792080.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_08_28T20_28_21.792080", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-28T20:28:21.792080.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-28T20:28:21.792080.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_08_28T20_28_21.792080", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-28T20:28:21.792080.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-28T20:28:21.792080.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_08_28T20_28_21.792080", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-28T20:28:21.792080.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-28T20:28:21.792080.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_08_28T20_28_21.792080", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-28T20:28:21.792080.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-28T20:28:21.792080.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_08_28T20_28_21.792080", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-28T20:28:21.792080.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-28T20:28:21.792080.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": 
"2023_08_28T20_28_21.792080", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-28T20:28:21.792080.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-28T20:28:21.792080.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_08_28T20_28_21.792080", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-28T20:28:21.792080.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-28T20:28:21.792080.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_08_28T20_28_21.792080", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-28T20:28:21.792080.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-28T20:28:21.792080.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_08_28T20_28_21.792080", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-28T20:28:21.792080.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-28T20:28:21.792080.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_08_28T20_28_21.792080", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-28T20:28:21.792080.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-28T20:28:21.792080.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_08_28T20_28_21.792080", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-28T20:28:21.792080.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-28T20:28:21.792080.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": 
[{"split": "2023_08_28T20_28_21.792080", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-28T20:28:21.792080.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-28T20:28:21.792080.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_08_28T20_28_21.792080", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-28T20:28:21.792080.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-28T20:28:21.792080.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_08_28T20_28_21.792080", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-28T20:28:21.792080.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-28T20:28:21.792080.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_08_28T20_28_21.792080", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-28T20:28:21.792080.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-28T20:28:21.792080.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_08_28T20_28_21.792080", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-28T20:28:21.792080.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-28T20:28:21.792080.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_08_28T20_28_21.792080", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-28T20:28:21.792080.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-28T20:28:21.792080.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_08_28T20_28_21.792080", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-28T20:28:21.792080.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-28T20:28:21.792080.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_08_28T20_28_21.792080", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-28T20:28:21.792080.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-28T20:28:21.792080.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_08_28T20_28_21.792080", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-28T20:28:21.792080.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-28T20:28:21.792080.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_09_18T09_59_01.594012", "path": ["**/details_harness|winogrande|5_2023-09-18T09-59-01.594012.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-09-18T09-59-01.594012.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_08_28T20_28_21.792080", "path": ["results_2023-08-28T20:28:21.792080.parquet"]}, {"split": "2023_09_18T09_59_01.594012", "path": ["results_2023-09-18T09-59-01.594012.parquet"]}, {"split": "latest", "path": ["results_2023-09-18T09-59-01.594012.parquet"]}]}]}
2023-09-18T08:59:13+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of zarakiquemparte/zaraxls-l2-7b ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model zarakiquemparte/zaraxls-l2-7b on the Open LLM Leaderboard. The dataset is composed of 64 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-09-18T09:59:01.594012(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
[ "# Dataset Card for Evaluation run of zarakiquemparte/zaraxls-l2-7b", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model zarakiquemparte/zaraxls-l2-7b on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-09-18T09:59:01.594012(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of zarakiquemparte/zaraxls-l2-7b", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model zarakiquemparte/zaraxls-l2-7b on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-09-18T09:59:01.594012(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 23, 31, 171, 67, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of zarakiquemparte/zaraxls-l2-7b## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model zarakiquemparte/zaraxls-l2-7b on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-09-18T09:59:01.594012(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
8fc958e4ea6fc697dea326041f7dd74a80997d4b
# Dataset Card for "edm_wavset" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
sebascorreia/jazz-set
[ "region:us" ]
2023-08-28T19:46:07+00:00
{"dataset_info": {"features": [{"name": "image", "dtype": "image"}, {"name": "audio_file", "dtype": "string"}, {"name": "slice", "dtype": "int16"}], "splits": [{"name": "train", "num_bytes": 82089970.0, "num_examples": 1848}], "download_size": 81976967, "dataset_size": 82089970.0}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]}
2023-08-30T13:30:53+00:00
[]
[]
TAGS #region-us
# Dataset Card for "edm_wavset" More Information needed
[ "# Dataset Card for \"edm_wavset\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"edm_wavset\"\n\nMore Information needed" ]
[ 6, 16 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"edm_wavset\"\n\nMore Information needed" ]
191c2c945ac40368b35449e7967fcc187631c4fe
# Dataset Card for "generative-ai-dataset-002" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
darthlordvictor/generative-ai-dataset-002
[ "region:us" ]
2023-08-28T19:57:34+00:00
{"dataset_info": {"features": [{"name": "product_name", "dtype": "string"}, {"name": "product_description", "dtype": "string"}, {"name": "marketing_email", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 123258, "num_examples": 99}], "download_size": 0, "dataset_size": 123258}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]}
2023-09-04T19:49:41+00:00
[]
[]
TAGS #region-us
# Dataset Card for "generative-ai-dataset-002" More Information needed
[ "# Dataset Card for \"generative-ai-dataset-002\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"generative-ai-dataset-002\"\n\nMore Information needed" ]
[ 6, 19 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"generative-ai-dataset-002\"\n\nMore Information needed" ]
718b15c778204e15a952420efb4723f2ba99cae2
# Dataset Card for "github-sample-65536tokens-llama" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
awettig/github-sample-65536tokens-llama
[ "region:us" ]
2023-08-28T19:58:46+00:00
{"dataset_info": {"features": [{"name": "input_ids", "sequence": "int32"}], "splits": [{"name": "train", "num_bytes": 329257888, "num_examples": 1256}], "download_size": 78876374, "dataset_size": 329257888}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]}
2023-08-28T19:58:51+00:00
[]
[]
TAGS #region-us
# Dataset Card for "github-sample-65536tokens-llama" More Information needed
[ "# Dataset Card for \"github-sample-65536tokens-llama\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"github-sample-65536tokens-llama\"\n\nMore Information needed" ]
[ 6, 23 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"github-sample-65536tokens-llama\"\n\nMore Information needed" ]
9d0debbc9b96c1475f1bfaf0f00919d221acaa9f
# Dataset Card for "arxiv-sample-65536tokens-llama" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
awettig/arxiv-sample-65536tokens-llama
[ "region:us" ]
2023-08-28T20:18:44+00:00
{"dataset_info": {"features": [{"name": "input_ids", "sequence": "int32"}], "splits": [{"name": "train", "num_bytes": 128976816, "num_examples": 492}], "download_size": 43974626, "dataset_size": 128976816}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]}
2023-08-28T20:18:46+00:00
[]
[]
TAGS #region-us
# Dataset Card for "arxiv-sample-65536tokens-llama" More Information needed
[ "# Dataset Card for \"arxiv-sample-65536tokens-llama\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"arxiv-sample-65536tokens-llama\"\n\nMore Information needed" ]
[ 6, 24 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"arxiv-sample-65536tokens-llama\"\n\nMore Information needed" ]
9aac86111bd5ac7ce066ed46ec2f78e635e33fbc
# Dataset Card for "prepare_dataset_train_batch1" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
spsither/prepare_dataset_train_batch1
[ "region:us" ]
2023-08-28T20:18:57+00:00
{"configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}], "dataset_info": {"features": [{"name": "input_features", "sequence": {"sequence": "float32"}}, {"name": "labels", "sequence": "int64"}], "splits": [{"name": "train", "num_bytes": 95822010464, "num_examples": 99760}], "download_size": 6089546764, "dataset_size": 95822010464}}
2023-08-29T04:29:58+00:00
[]
[]
TAGS #region-us
# Dataset Card for "prepare_dataset_train_batch1" More Information needed
[ "# Dataset Card for \"prepare_dataset_train_batch1\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"prepare_dataset_train_batch1\"\n\nMore Information needed" ]
[ 6, 22 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"prepare_dataset_train_batch1\"\n\nMore Information needed" ]
d737852fdabd4119dd0f9bdabf04c59d0c7c0069
# Dataset Card for "tiny-codes-standardized" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
HydraLM/tiny-codes-standardized
[ "region:us" ]
2023-08-28T20:26:52+00:00
{"dataset_info": {"features": [{"name": "message", "dtype": "string"}, {"name": "message_type", "dtype": "string"}, {"name": "message_id", "dtype": "int64"}, {"name": "conversation_id", "dtype": "int64"}], "splits": [{"name": "train", "num_bytes": 3678763115, "num_examples": 3264618}], "download_size": 1264753822, "dataset_size": 3678763115}}
2023-08-30T19:15:16+00:00
[]
[]
TAGS #region-us
# Dataset Card for "tiny-codes-standardized" More Information needed
[ "# Dataset Card for \"tiny-codes-standardized\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"tiny-codes-standardized\"\n\nMore Information needed" ]
[ 6, 17 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"tiny-codes-standardized\"\n\nMore Information needed" ]
4fca1b6b5d82921bb38c5653bf2f9ca61731920f
# Dataset Card for "Evol-Instruct-Code-80k-v1-standardized" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
HydraLM/Evol-Instruct-Code-80k-v1-standardized
[ "region:us" ]
2023-08-28T20:49:45+00:00
{"dataset_info": {"features": [{"name": "message", "dtype": "string"}, {"name": "message_type", "dtype": "string"}, {"name": "message_id", "dtype": "int64"}, {"name": "conversation_id", "dtype": "int64"}], "splits": [{"name": "train", "num_bytes": 120580750, "num_examples": 156528}], "download_size": 52351077, "dataset_size": 120580750}}
2023-08-30T19:28:05+00:00
[]
[]
TAGS #region-us
# Dataset Card for "Evol-Instruct-Code-80k-v1-standardized" More Information needed
[ "# Dataset Card for \"Evol-Instruct-Code-80k-v1-standardized\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"Evol-Instruct-Code-80k-v1-standardized\"\n\nMore Information needed" ]
[ 6, 24 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"Evol-Instruct-Code-80k-v1-standardized\"\n\nMore Information needed" ]
f4698c706adaf491cbbfe432b174fa5d7ba98b0f
# Dataset Card for "instruct-python-500k-standardized" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
HydraLM/instruct-python-500k-standardized
[ "region:us" ]
2023-08-28T20:58:47+00:00
{"dataset_info": {"features": [{"name": "message", "dtype": "string"}, {"name": "message_type", "dtype": "string"}, {"name": "message_id", "dtype": "int64"}, {"name": "conversation_id", "dtype": "int64"}], "splits": [{"name": "train", "num_bytes": 1010030074, "num_examples": 1002698}], "download_size": 529792228, "dataset_size": 1010030074}}
2023-08-30T19:23:27+00:00
[]
[]
TAGS #region-us
# Dataset Card for "instruct-python-500k-standardized" More Information needed
[ "# Dataset Card for \"instruct-python-500k-standardized\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"instruct-python-500k-standardized\"\n\nMore Information needed" ]
[ 6, 20 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"instruct-python-500k-standardized\"\n\nMore Information needed" ]
fa7f6dca7f9645f95c37d183ebc773f960580096
# Dataset Card for "prepare_dataset_train_batch0" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
spsither/prepare_dataset_train_batch0
[ "region:us" ]
2023-08-28T21:27:26+00:00
{"configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}], "dataset_info": {"features": [{"name": "input_features", "sequence": {"sequence": "float32"}}, {"name": "labels", "sequence": "int64"}], "splits": [{"name": "train", "num_bytes": 95821956240, "num_examples": 99760}], "download_size": 156274877, "dataset_size": 95821956240}}
2023-08-29T03:37:38+00:00
[]
[]
TAGS #region-us
# Dataset Card for "prepare_dataset_train_batch0" More Information needed
[ "# Dataset Card for \"prepare_dataset_train_batch0\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"prepare_dataset_train_batch0\"\n\nMore Information needed" ]
[ 6, 22 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"prepare_dataset_train_batch0\"\n\nMore Information needed" ]