Column schema as reported by the dataset viewer (values are character lengths for strings and item counts for lists):

| Column | Type | Min | Max |
| --- | --- | --- | --- |
| sha | string | 40 | 40 |
| text | string | 1 | 13.4M |
| id | string | 2 | 117 |
| tags | list | 1 | 7.91k |
| created_at | string | 25 | 25 |
| metadata | string | 2 | 875k |
| last_modified | string | 25 | 25 |
| arxiv | list | 0 | 25 |
| languages | list | 0 | 7.91k |
| tags_str | string | 17 | 159k |
| text_str | string | 1 | 447k |
| text_lists | list | 0 | 352 |
| processed_texts | list | 1 | 353 |
| tokens_length | list | 1 | 353 |
| input_texts | list | 1 | 40 |
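If this dump corresponds to a dataset hosted on the Hugging Face Hub, a minimal sketch of loading it and checking the columns against the table above might look like the following; the repo id is a placeholder assumption, not the real dataset name:

```python
from datasets import load_dataset

# Placeholder repo id -- substitute the actual Hub dataset name.
ds = load_dataset("example-org/dataset-cards-dump", split="train")

# The columns should match the schema table above.
print(ds.column_names)    # e.g. ['sha', 'text', 'id', 'tags', ...]
print(len(ds[0]["sha"]))  # sha values are fixed-length (40 characters)
```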
6d3dbf98b21168c7a2868fd59277cf4d18dd8392
# Dataset Card for "eval_tag_squad_v9" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
tyzhu/eval_tag_squad_v9
[ "region:us" ]
2023-10-03T07:08:38+00:00
{"dataset_info": {"features": [{"name": "id", "dtype": "string"}, {"name": "title", "dtype": "string"}, {"name": "context", "dtype": "string"}, {"name": "question", "dtype": "string"}, {"name": "answers", "sequence": [{"name": "text", "dtype": "string"}, {"name": "answer_start", "dtype": "int32"}]}, {"name": "inputs", "dtype": "string"}, {"name": "targets", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 13273785, "num_examples": 10570}, {"name": "validation", "num_bytes": 13273785, "num_examples": 10570}], "download_size": 5722530, "dataset_size": 26547570}}
2023-10-05T15:55:32+00:00
[]
[]
TAGS #region-us
# Dataset Card for "eval_tag_squad_v9" More Information needed
[ "# Dataset Card for \"eval_tag_squad_v9\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"eval_tag_squad_v9\"\n\nMore Information needed" ]
[ 6, 20 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"eval_tag_squad_v9\"\n\nMore Information needed" ]
2fb99d55609a610a62126c327199fc37e08beee0
# Dataset Card for "helicopter_drawing_descriptions" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
Falah/helicopter_drawing_descriptions
[ "region:us" ]
2023-10-03T07:10:28+00:00
{"dataset_info": {"features": [{"name": "prompts", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 176717, "num_examples": 1000}], "download_size": 18746, "dataset_size": 176717}}
2023-10-03T07:10:29+00:00
[]
[]
TAGS #region-us
# Dataset Card for "helicopter_drawing_descriptions" More Information needed
[ "# Dataset Card for \"helicopter_drawing_descriptions\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"helicopter_drawing_descriptions\"\n\nMore Information needed" ]
[ 6, 20 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"helicopter_drawing_descriptions\"\n\nMore Information needed" ]
7bde739dfbfa179fea0bfb3d733016fa60a28d62
# Dataset Card for "IP2P-adwm-200" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
FelixdoingAI/IP2P-adwm-200
[ "region:us" ]
2023-10-03T07:17:32+00:00
{"dataset_info": {"features": [{"name": "original_prompt", "dtype": "string"}, {"name": "original_image", "dtype": "image"}, {"name": "edit_prompt", "dtype": "string"}, {"name": "edited_prompt", "dtype": "string"}, {"name": "edited_image", "dtype": "image"}, {"name": "adversarial_image", "dtype": "image"}], "splits": [{"name": "train", "num_bytes": 128123657.0, "num_examples": 200}], "download_size": 128127660, "dataset_size": 128123657.0}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]}
2024-01-26T12:11:50+00:00
[]
[]
TAGS #region-us
# Dataset Card for "IP2P-adwm-200" More Information needed
[ "# Dataset Card for \"IP2P-adwm-200\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"IP2P-adwm-200\"\n\nMore Information needed" ]
[ 6, 17 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"IP2P-adwm-200\"\n\nMore Information needed" ]
34cb84da876dae7bc721873f4c1d4cadfc1f72a3
# Dataset Card for "military_drawing_descriptions" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
Falah/military_drawing_descriptions
[ "region:us" ]
2023-10-03T07:23:51+00:00
{"dataset_info": {"features": [{"name": "prompts", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 163051, "num_examples": 1000}], "download_size": 18457, "dataset_size": 163051}}
2023-10-03T07:35:38+00:00
[]
[]
TAGS #region-us
# Dataset Card for "military_drawing_descriptions" More Information needed
[ "# Dataset Card for \"military_drawing_descriptions\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"military_drawing_descriptions\"\n\nMore Information needed" ]
[ 6, 19 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"military_drawing_descriptions\"\n\nMore Information needed" ]
bf78960edeeabaafe11a5ce3aef5ceeb3728fa22
# Dataset Card for "squad_paraphrases_10k" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
nguyenthanhdo/squad_paraphrases_10k
[ "region:us" ]
2023-10-03T07:30:43+00:00
{"dataset_info": {"features": [{"name": "context", "dtype": "string"}, {"name": "question", "dtype": "string"}, {"name": "answer", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 11644151.146560319, "num_examples": 10000}], "download_size": 6061896, "dataset_size": 11644151.146560319}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]}
2023-10-03T07:30:50+00:00
[]
[]
TAGS #region-us
# Dataset Card for "squad_paraphrases_10k" More Information needed
[ "# Dataset Card for \"squad_paraphrases_10k\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"squad_paraphrases_10k\"\n\nMore Information needed" ]
[ 6, 19 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"squad_paraphrases_10k\"\n\nMore Information needed" ]
65c584be22ccd7da31c36513069a0c1a0cee1102
# Dataset Card for "turkish_wikipedia" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
umarigan/turkish_wikipedia
[ "task_categories:text-classification", "task_categories:translation", "task_categories:summarization", "size_categories:100K<n<1M", "language:tr", "region:us" ]
2023-10-03T07:38:29+00:00
{"language": ["tr"], "size_categories": ["100K<n<1M"], "task_categories": ["text-classification", "translation", "summarization"], "dataset_info": {"features": [{"name": "id", "dtype": "int64"}, {"name": "text", "dtype": "string"}, {"name": "title", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 1142404262, "num_examples": 524601}], "download_size": 629924151, "dataset_size": 1142404262}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]}
2024-02-05T06:29:16+00:00
[]
[ "tr" ]
TAGS #task_categories-text-classification #task_categories-translation #task_categories-summarization #size_categories-100K<n<1M #language-Turkish #region-us
# Dataset Card for "turkish_wikipedia" More Information needed
[ "# Dataset Card for \"turkish_wikipedia\"\n\nMore Information needed" ]
[ "TAGS\n#task_categories-text-classification #task_categories-translation #task_categories-summarization #size_categories-100K<n<1M #language-Turkish #region-us \n", "# Dataset Card for \"turkish_wikipedia\"\n\nMore Information needed" ]
[ 54, 14 ]
[ "passage: TAGS\n#task_categories-text-classification #task_categories-translation #task_categories-summarization #size_categories-100K<n<1M #language-Turkish #region-us \n# Dataset Card for \"turkish_wikipedia\"\n\nMore Information needed" ]
1587294314211ef17f7fbe24d47895517ce92966
We collected this dataset from several mental health-related subreddits on https://www.reddit.com/ to further the study of mental disorders and suicidal ideation. We name this dataset the Reddit SuicideWatch and Mental Health Collection, or SWMH for short; its discussions cover suicide-related intention and mental disorders such as depression, anxiety, and bipolar disorder. We used the official Reddit API and developed a web spider to collect posts from the targeted forums. The collection contains a total of 54,412 posts. The specific subreddits are listed in Table 4 of the paper below, along with the number and percentage of posts in each part of the train-val-test split. The dataset is also available on [Zenodo](https://doi.org/10.5281/zenodo.6476179). [![DOI](https://zenodo.org/badge/DOI/10.5281/zenodo.6476179.svg)](https://doi.org/10.5281/zenodo.6476179) By accessing the dataset, you agree that: 1. User(s) will make no attempt to identify or contact individual participants from whom these data were collected, even though this dataset is anonymous; 2. User(s) will not distribute these data to any entity or individual beyond those specified in the approved Data Access Agreement; 3. User(s) will use the data only for research purposes; 4. User(s) will take all reasonable and customary measures to protect the confidential nature of the materials and avoid disclosure or unauthorized use; 5. The data and any derivatives will be stored only on password-protected servers where access is restricted using Unix group permissions. The dataset is only for research purposes. Please use your **institutional email** to request access. Access requests from a .com email will be rejected. If you use this dataset, please cite the paper as: Ji, S., Li, X., Huang, Z. et al. Suicidal ideation and mental disorder detection with attentive relation networks. Neural Comput & Applic (2021). https://doi.org/10.1007/s00521-021-06208-y ``` @article{ji2021suicidal, title={Suicidal ideation and mental disorder detection with attentive relation networks}, author={Ji, Shaoxiong and Li, Xue and Huang, Zi and Cambria, Erik}, journal={Neural Computing and Applications}, year={2021}, publisher={Springer} } ```
AIMH/SWMH
[ "license:cc-by-nc-4.0", "region:us" ]
2023-10-03T07:43:02+00:00
{"license": "cc-by-nc-4.0"}
2023-11-24T14:45:08+00:00
[]
[]
TAGS #license-cc-by-nc-4.0 #region-us
We collected this dataset from several mental health-related subreddits on URL to further the study of mental disorders and suicidal ideation. We name this dataset the Reddit SuicideWatch and Mental Health Collection, or SWMH for short; its discussions cover suicide-related intention and mental disorders such as depression, anxiety, and bipolar disorder. We used the official Reddit API and developed a web spider to collect posts from the targeted forums. The collection contains a total of 54,412 posts. The specific subreddits are listed in Table 4 of the paper below, along with the number and percentage of posts in each part of the train-val-test split. The dataset is also available on Zenodo. ![DOI](URL) By accessing the dataset, you agree that: 1. User(s) will make no attempt to identify or contact individual participants from whom these data were collected, even though this dataset is anonymous; 2. User(s) will not distribute these data to any entity or individual beyond those specified in the approved Data Access Agreement; 3. User(s) will use the data only for research purposes; 4. User(s) will take all reasonable and customary measures to protect the confidential nature of the materials and avoid disclosure or unauthorized use; 5. The data and any derivatives will be stored only on password-protected servers where access is restricted using Unix group permissions. The dataset is only for research purposes. Please use your institutional email to request access. Access requests from a .com email will be rejected. If you use this dataset, please cite the paper as: Ji, S., Li, X., Huang, Z. et al. Suicidal ideation and mental disorder detection with attentive relation networks. Neural Comput & Applic (2021). URL
[]
[ "TAGS\n#license-cc-by-nc-4.0 #region-us \n" ]
[ 17 ]
[ "passage: TAGS\n#license-cc-by-nc-4.0 #region-us \n" ]
726367cdc73f0f74a36b3c5cec7abfb65ffb554d
# Dataset Card for "6a3f723d" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
result-kand2-sdxl-wuerst-karlo/6a3f723d
[ "region:us" ]
2023-10-03T07:48:04+00:00
{"dataset_info": {"features": [{"name": "result", "dtype": "string"}, {"name": "id", "dtype": "int64"}], "splits": [{"name": "train", "num_bytes": 162, "num_examples": 10}], "download_size": 1317, "dataset_size": 162}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]}
2023-10-03T07:48:05+00:00
[]
[]
TAGS #region-us
# Dataset Card for "6a3f723d" More Information needed
[ "# Dataset Card for \"6a3f723d\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"6a3f723d\"\n\nMore Information needed" ]
[ 6, 17 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"6a3f723d\"\n\nMore Information needed" ]
b70606568d71edc72eb744bdaa14a22c9e88daf5
# Dataset Card for "040dec0a" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
result-kand2-sdxl-wuerst-karlo/040dec0a
[ "region:us" ]
2023-10-03T07:51:40+00:00
{"dataset_info": {"features": [{"name": "result", "dtype": "string"}, {"name": "id", "dtype": "int64"}], "splits": [{"name": "train", "num_bytes": 160, "num_examples": 10}], "download_size": 1292, "dataset_size": 160}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]}
2023-10-03T07:51:41+00:00
[]
[]
TAGS #region-us
# Dataset Card for "040dec0a" More Information needed
[ "# Dataset Card for \"040dec0a\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"040dec0a\"\n\nMore Information needed" ]
[ 6, 15 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"040dec0a\"\n\nMore Information needed" ]
fe604a6915900e7b049d4be295217cc55e4b8719
# Dataset Card for "f4d8fc49" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
result-kand2-sdxl-wuerst-karlo/f4d8fc49
[ "region:us" ]
2023-10-03T07:54:44+00:00
{"dataset_info": {"features": [{"name": "result", "dtype": "string"}, {"name": "id", "dtype": "int64"}], "splits": [{"name": "train", "num_bytes": 159, "num_examples": 10}], "download_size": 1306, "dataset_size": 159}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]}
2023-10-03T07:54:45+00:00
[]
[]
TAGS #region-us
# Dataset Card for "f4d8fc49" More Information needed
[ "# Dataset Card for \"f4d8fc49\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"f4d8fc49\"\n\nMore Information needed" ]
[ 6, 16 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"f4d8fc49\"\n\nMore Information needed" ]
3418dc393e09bc89e8a0f2a9ee1cfd0a5d436a5a
# Dataset Card for "animal_drawing_descriptions" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
Falah/animal_drawing_descriptions
[ "region:us" ]
2023-10-03T08:00:26+00:00
{"dataset_info": {"features": [{"name": "prompts", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 156491, "num_examples": 1000}], "download_size": 18803, "dataset_size": 156491}}
2023-10-03T08:00:27+00:00
[]
[]
TAGS #region-us
# Dataset Card for "animal_drawing_descriptions" More Information needed
[ "# Dataset Card for \"animal_drawing_descriptions\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"animal_drawing_descriptions\"\n\nMore Information needed" ]
[ 6, 18 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"animal_drawing_descriptions\"\n\nMore Information needed" ]
fe977e124e5190280cca4887f51a1312ebd8916a
werdna696
we-r-ai/stunning
[ "task_categories:text-classification", "size_categories:n<1K", "license:apache-2.0", "art", "ai art", "doi:10.57967/hf/1184", "region:us" ]
2023-10-03T08:17:50+00:00
{"license": "apache-2.0", "size_categories": ["n<1K"], "task_categories": ["text-classification"], "pretty_name": "nawdre mod", "tags": ["art", "ai art"]}
2023-10-04T12:24:07+00:00
[]
[]
TAGS #task_categories-text-classification #size_categories-n<1K #license-apache-2.0 #art #ai art #doi-10.57967/hf/1184 #region-us
werdna696
[]
[ "TAGS\n#task_categories-text-classification #size_categories-n<1K #license-apache-2.0 #art #ai art #doi-10.57967/hf/1184 #region-us \n" ]
[ 52 ]
[ "passage: TAGS\n#task_categories-text-classification #size_categories-n<1K #license-apache-2.0 #art #ai art #doi-10.57967/hf/1184 #region-us \n" ]
60ea14ea138c53fec73f58fe20b8a5578ff7610e
# Dataset Card for "guanaco-llama2-200" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
gauravtopre9/guanaco-llama2-200
[ "region:us" ]
2023-10-03T08:44:14+00:00
{"dataset_info": {"features": [{"name": "text", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 338808, "num_examples": 200}], "download_size": 201257, "dataset_size": 338808}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]}
2023-10-03T08:44:15+00:00
[]
[]
TAGS #region-us
# Dataset Card for "guanaco-llama2-200" More Information needed
[ "# Dataset Card for \"guanaco-llama2-200\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"guanaco-llama2-200\"\n\nMore Information needed" ]
[ 6, 18 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"guanaco-llama2-200\"\n\nMore Information needed" ]
03f0cbbf80104c68f0fabcbb1b4ce8aa9696781c
# Dataset Card for "squad_first_sent_v4_train_30_eval_10" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
tyzhu/squad_first_sent_v4_train_30_eval_10
[ "region:us" ]
2023-10-03T09:00:10+00:00
{"dataset_info": {"features": [{"name": "id", "dtype": "string"}, {"name": "title", "dtype": "string"}, {"name": "context", "dtype": "string"}, {"name": "question", "dtype": "string"}, {"name": "answers", "struct": [{"name": "answer_start", "sequence": "int64"}, {"name": "text", "sequence": "string"}]}, {"name": "context_id", "dtype": "string"}, {"name": "inputs", "dtype": "string"}, {"name": "targets", "dtype": "string"}, {"name": "__index_level_0__", "dtype": "int64"}], "splits": [{"name": "train", "num_bytes": 111024, "num_examples": 70}, {"name": "validation", "num_bytes": 11592, "num_examples": 10}, {"name": "eval_first_sent", "num_bytes": 11592, "num_examples": 10}], "download_size": 102146, "dataset_size": 134208}}
2023-10-03T09:41:48+00:00
[]
[]
TAGS #region-us
# Dataset Card for "squad_first_sent_v4_train_30_eval_10" More Information needed
[ "# Dataset Card for \"squad_first_sent_v4_train_30_eval_10\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"squad_first_sent_v4_train_30_eval_10\"\n\nMore Information needed" ]
[ 6, 30 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"squad_first_sent_v4_train_30_eval_10\"\n\nMore Information needed" ]
b3d53a353c93ac018d449c1f0a399598fff323fc
# Dataset Card for LLM Performance ### Dataset Summary This table presents a comparative analysis of a few popular LLMs, such as Falcon, Llama 2, and Mistral, highlighting both the quality of their outputs and the corresponding inference times. We fine-tuned the Falcon model on the full Alpaca dataset of 52k data points and on a randomly sampled subset of 5k data points, then compared both with the base and instruct versions of Falcon, Llama 2, and Mistral. All models have 7B parameters and use int4 representation.
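As a rough illustration of the measurement described in the summary, here is a minimal sketch of timing int4 (4-bit) inference for a 7B model, assuming the `transformers` and `bitsandbytes` libraries; the checkpoint id, prompt, and generation settings are placeholder assumptions, not the card's actual protocol:

```python
import time
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer, BitsAndBytesConfig

# Hypothetical choice: any of the compared 7B checkpoints could go here.
model_id = "tiiuae/falcon-7b"
bnb = BitsAndBytesConfig(load_in_4bit=True, bnb_4bit_compute_dtype=torch.float16)

tok = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(
    model_id, quantization_config=bnb, device_map="auto"
)

inputs = tok("Explain quantization in one sentence.", return_tensors="pt").to(model.device)
start = time.perf_counter()
out = model.generate(**inputs, max_new_tokens=64)
print(f"inference time: {time.perf_counter() - start:.2f}s")
print(tok.decode(out[0], skip_special_tokens=True))
```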
zivicmilos/llm-performance
[ "region:us" ]
2023-10-03T09:10:25+00:00
{}
2023-10-03T10:26:28+00:00
[]
[]
TAGS #region-us
# Dataset Card for LLM Performance ### Dataset Summary This table presents a comparative analysis of a few popular LLMs, such as Falcon, Llama 2, and Mistral, highlighting both the quality of their outputs and the corresponding inference times. We fine-tuned the Falcon model on the full Alpaca dataset of 52k data points and on a randomly sampled subset of 5k data points, then compared both with the base and instruct versions of Falcon, Llama 2, and Mistral. All models have 7B parameters and use int4 representation.
[ "# Dataset Card for LLM Performance", "### Dataset Summary\n\nThis table presents a comparative analysis of a few popular LLMs, such as Falcon, Llama 2, and Mistral, highlighting both the quality of their outputs and the corresponding inference times. We fine-tuned the Falcon model on the full Alpaca dataset of 52k data points and on a randomly sampled subset of 5k data points, then compared both with the base and instruct versions of Falcon, Llama 2, and Mistral. All models have 7B parameters and use int4 representation." ]
[ "TAGS\n#region-us \n", "# Dataset Card for LLM Performance", "### Dataset Summary\n\nThis table presents a comparative analysis of a few popular LLMs, such as Falcon, Llama 2, and Mistral, highlighting both the quality of their outputs and the corresponding inference times. We fine-tuned the Falcon model on the full Alpaca dataset of 52k data points and on a randomly sampled subset of 5k data points, then compared both with the base and instruct versions of Falcon, Llama 2, and Mistral. All models have 7B parameters and use int4 representation." ]
[ 6, 8, 120 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for LLM Performance### Dataset Summary\n\nThis table presents a comprehensive comparative analysis of a few popular LLMs, such as Falcon, LLama 2, and Mistral, highlighting both the quality of their outputs and the corresponding inference times. We finetuned the Falcon model with the full Alpaca dataset of 52k datapoints and with randomly sampled 5k datapoints and then compared them with base and instruct versions of Falcon, LLama 2 and Mistral. All models are with 7B parameters and in int4 representation." ]
9847916f22deb77c438b24e2dbb706d3341c888f
# Dataset Card for Evaluation run of PY007/TinyLlama-1.1B-Chat-v0.1 ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/PY007/TinyLlama-1.1B-Chat-v0.1 - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [PY007/TinyLlama-1.1B-Chat-v0.1](https://huggingface.co/PY007/TinyLlama-1.1B-Chat-v0.1) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_PY007__TinyLlama-1.1B-Chat-v0.1", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-10-26T06:25:57.926077](https://huggingface.co/datasets/open-llm-leaderboard/details_PY007__TinyLlama-1.1B-Chat-v0.1/blob/main/results_2023-10-26T06-25-57.926077.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks; you can find each one in the "results" configuration and in the "latest" split of each eval): ```python { "all": { "em": 0.006606543624161074, "em_stderr": 0.0008296357389922025, "f1": 0.0474685402684564, "f1_stderr": 0.001397428828037708, "acc": 0.27731808735540847, "acc_stderr": 0.00799261729499414 }, "harness|drop|3": { "em": 0.006606543624161074, "em_stderr": 0.0008296357389922025, "f1": 0.0474685402684564, "f1_stderr": 0.001397428828037708 }, "harness|gsm8k|5": { "acc": 0.00530705079605762, "acc_stderr": 0.0020013057209480427 }, "harness|winogrande|5": { "acc": 0.5493291239147593, "acc_stderr": 0.013983928869040239 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
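Building on the card's loading example, the aggregated metrics it describes can be read from the "results" configuration, whose "latest" split points at the newest run; a minimal sketch:

```python
from datasets import load_dataset

# "results" holds the aggregated metrics; "latest" tracks the newest run.
results = load_dataset(
    "open-llm-leaderboard/details_PY007__TinyLlama-1.1B-Chat-v0.1",
    "results",
    split="latest",
)
print(results[0])
```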
open-llm-leaderboard/details_PY007__TinyLlama-1.1B-Chat-v0.1
[ "region:us" ]
2023-10-03T09:21:47+00:00
{"pretty_name": "Evaluation run of PY007/TinyLlama-1.1B-Chat-v0.1", "dataset_summary": "Dataset automatically created during the evaluation run of model [PY007/TinyLlama-1.1B-Chat-v0.1](https://huggingface.co/PY007/TinyLlama-1.1B-Chat-v0.1) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_PY007__TinyLlama-1.1B-Chat-v0.1\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-10-26T06:25:57.926077](https://huggingface.co/datasets/open-llm-leaderboard/details_PY007__TinyLlama-1.1B-Chat-v0.1/blob/main/results_2023-10-26T06-25-57.926077.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.006606543624161074,\n \"em_stderr\": 0.0008296357389922025,\n \"f1\": 0.0474685402684564,\n \"f1_stderr\": 0.001397428828037708,\n \"acc\": 0.27731808735540847,\n \"acc_stderr\": 0.00799261729499414\n },\n \"harness|drop|3\": {\n \"em\": 0.006606543624161074,\n \"em_stderr\": 0.0008296357389922025,\n \"f1\": 0.0474685402684564,\n \"f1_stderr\": 0.001397428828037708\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.00530705079605762,\n \"acc_stderr\": 0.0020013057209480427\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.5493291239147593,\n \"acc_stderr\": 0.013983928869040239\n }\n}\n```", "repo_url": "https://huggingface.co/PY007/TinyLlama-1.1B-Chat-v0.1", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_10_03T10_21_28.182244", "path": ["**/details_harness|arc:challenge|25_2023-10-03T10-21-28.182244.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-10-03T10-21-28.182244.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_10_26T06_25_57.926077", "path": ["**/details_harness|drop|3_2023-10-26T06-25-57.926077.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-10-26T06-25-57.926077.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_10_26T06_25_57.926077", "path": ["**/details_harness|gsm8k|5_2023-10-26T06-25-57.926077.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-10-26T06-25-57.926077.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_10_03T10_21_28.182244", "path": ["**/details_harness|hellaswag|10_2023-10-03T10-21-28.182244.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-10-03T10-21-28.182244.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_5", "data_files": [{"split": "2023_10_03T10_21_28.182244", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-03T10-21-28.182244.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-10-03T10-21-28.182244.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-10-03T10-21-28.182244.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-10-03T10-21-28.182244.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-03T10-21-28.182244.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-10-03T10-21-28.182244.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-10-03T10-21-28.182244.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-10-03T10-21-28.182244.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-10-03T10-21-28.182244.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-10-03T10-21-28.182244.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-10-03T10-21-28.182244.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-10-03T10-21-28.182244.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-03T10-21-28.182244.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-10-03T10-21-28.182244.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-03T10-21-28.182244.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-03T10-21-28.182244.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-10-03T10-21-28.182244.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-10-03T10-21-28.182244.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-10-03T10-21-28.182244.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-03T10-21-28.182244.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-03T10-21-28.182244.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-03T10-21-28.182244.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-10-03T10-21-28.182244.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-03T10-21-28.182244.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-03T10-21-28.182244.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-03T10-21-28.182244.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-03T10-21-28.182244.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-10-03T10-21-28.182244.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-03T10-21-28.182244.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-03T10-21-28.182244.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-03T10-21-28.182244.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-03T10-21-28.182244.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-10-03T10-21-28.182244.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-10-03T10-21-28.182244.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-10-03T10-21-28.182244.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-10-03T10-21-28.182244.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-03T10-21-28.182244.parquet", 
"**/details_harness|hendrycksTest-machine_learning|5_2023-10-03T10-21-28.182244.parquet", "**/details_harness|hendrycksTest-management|5_2023-10-03T10-21-28.182244.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-10-03T10-21-28.182244.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-10-03T10-21-28.182244.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-10-03T10-21-28.182244.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-10-03T10-21-28.182244.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-03T10-21-28.182244.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-10-03T10-21-28.182244.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-10-03T10-21-28.182244.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-10-03T10-21-28.182244.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-10-03T10-21-28.182244.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-10-03T10-21-28.182244.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-10-03T10-21-28.182244.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-10-03T10-21-28.182244.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-10-03T10-21-28.182244.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-10-03T10-21-28.182244.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-10-03T10-21-28.182244.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-03T10-21-28.182244.parquet", "**/details_harness|hendrycksTest-virology|5_2023-10-03T10-21-28.182244.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-10-03T10-21-28.182244.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-03T10-21-28.182244.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-10-03T10-21-28.182244.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-10-03T10-21-28.182244.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-10-03T10-21-28.182244.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-03T10-21-28.182244.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-10-03T10-21-28.182244.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-10-03T10-21-28.182244.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-10-03T10-21-28.182244.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-10-03T10-21-28.182244.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-10-03T10-21-28.182244.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-10-03T10-21-28.182244.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-10-03T10-21-28.182244.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-03T10-21-28.182244.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-10-03T10-21-28.182244.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-03T10-21-28.182244.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-03T10-21-28.182244.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-10-03T10-21-28.182244.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-10-03T10-21-28.182244.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-10-03T10-21-28.182244.parquet", 
"**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-03T10-21-28.182244.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-03T10-21-28.182244.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-03T10-21-28.182244.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-10-03T10-21-28.182244.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-03T10-21-28.182244.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-03T10-21-28.182244.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-03T10-21-28.182244.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-03T10-21-28.182244.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-10-03T10-21-28.182244.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-03T10-21-28.182244.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-03T10-21-28.182244.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-03T10-21-28.182244.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-03T10-21-28.182244.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-10-03T10-21-28.182244.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-10-03T10-21-28.182244.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-10-03T10-21-28.182244.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-10-03T10-21-28.182244.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-03T10-21-28.182244.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-10-03T10-21-28.182244.parquet", "**/details_harness|hendrycksTest-management|5_2023-10-03T10-21-28.182244.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-10-03T10-21-28.182244.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-10-03T10-21-28.182244.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-10-03T10-21-28.182244.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-10-03T10-21-28.182244.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-03T10-21-28.182244.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-10-03T10-21-28.182244.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-10-03T10-21-28.182244.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-10-03T10-21-28.182244.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-10-03T10-21-28.182244.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-10-03T10-21-28.182244.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-10-03T10-21-28.182244.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-10-03T10-21-28.182244.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-10-03T10-21-28.182244.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-10-03T10-21-28.182244.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-10-03T10-21-28.182244.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-03T10-21-28.182244.parquet", "**/details_harness|hendrycksTest-virology|5_2023-10-03T10-21-28.182244.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-10-03T10-21-28.182244.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", 
"data_files": [{"split": "2023_10_03T10_21_28.182244", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-03T10-21-28.182244.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-03T10-21-28.182244.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_10_03T10_21_28.182244", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-10-03T10-21-28.182244.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-10-03T10-21-28.182244.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_10_03T10_21_28.182244", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-10-03T10-21-28.182244.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-10-03T10-21-28.182244.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_10_03T10_21_28.182244", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-10-03T10-21-28.182244.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-10-03T10-21-28.182244.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_10_03T10_21_28.182244", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-03T10-21-28.182244.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-03T10-21-28.182244.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_10_03T10_21_28.182244", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-10-03T10-21-28.182244.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-10-03T10-21-28.182244.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_10_03T10_21_28.182244", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-10-03T10-21-28.182244.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-10-03T10-21-28.182244.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_10_03T10_21_28.182244", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-10-03T10-21-28.182244.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-10-03T10-21-28.182244.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_10_03T10_21_28.182244", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-10-03T10-21-28.182244.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-10-03T10-21-28.182244.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_10_03T10_21_28.182244", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-10-03T10-21-28.182244.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-10-03T10-21-28.182244.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_10_03T10_21_28.182244", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-10-03T10-21-28.182244.parquet"]}, {"split": "latest", 
"path": ["**/details_harness|hendrycksTest-college_physics|5_2023-10-03T10-21-28.182244.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_10_03T10_21_28.182244", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-10-03T10-21-28.182244.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-10-03T10-21-28.182244.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_10_03T10_21_28.182244", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-03T10-21-28.182244.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-03T10-21-28.182244.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_10_03T10_21_28.182244", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-10-03T10-21-28.182244.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-10-03T10-21-28.182244.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_10_03T10_21_28.182244", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-03T10-21-28.182244.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-03T10-21-28.182244.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_10_03T10_21_28.182244", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-03T10-21-28.182244.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-03T10-21-28.182244.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_10_03T10_21_28.182244", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-10-03T10-21-28.182244.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-10-03T10-21-28.182244.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_10_03T10_21_28.182244", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-10-03T10-21-28.182244.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-10-03T10-21-28.182244.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_10_03T10_21_28.182244", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-10-03T10-21-28.182244.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-10-03T10-21-28.182244.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_10_03T10_21_28.182244", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-03T10-21-28.182244.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-03T10-21-28.182244.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_10_03T10_21_28.182244", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-03T10-21-28.182244.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-03T10-21-28.182244.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_10_03T10_21_28.182244", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-03T10-21-28.182244.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-03T10-21-28.182244.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_10_03T10_21_28.182244", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-10-03T10-21-28.182244.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-10-03T10-21-28.182244.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_10_03T10_21_28.182244", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-03T10-21-28.182244.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-03T10-21-28.182244.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_10_03T10_21_28.182244", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-03T10-21-28.182244.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-03T10-21-28.182244.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_10_03T10_21_28.182244", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-03T10-21-28.182244.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-03T10-21-28.182244.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_10_03T10_21_28.182244", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-03T10-21-28.182244.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-03T10-21-28.182244.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_10_03T10_21_28.182244", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-10-03T10-21-28.182244.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-10-03T10-21-28.182244.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_10_03T10_21_28.182244", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-03T10-21-28.182244.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-03T10-21-28.182244.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_10_03T10_21_28.182244", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-03T10-21-28.182244.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-03T10-21-28.182244.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_10_03T10_21_28.182244", "path": 
["**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-03T10-21-28.182244.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-03T10-21-28.182244.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_10_03T10_21_28.182244", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-03T10-21-28.182244.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-03T10-21-28.182244.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_10_03T10_21_28.182244", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-10-03T10-21-28.182244.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-10-03T10-21-28.182244.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_10_03T10_21_28.182244", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-10-03T10-21-28.182244.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-10-03T10-21-28.182244.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_10_03T10_21_28.182244", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-10-03T10-21-28.182244.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-10-03T10-21-28.182244.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_10_03T10_21_28.182244", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-10-03T10-21-28.182244.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-10-03T10-21-28.182244.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_10_03T10_21_28.182244", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-03T10-21-28.182244.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-03T10-21-28.182244.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_10_03T10_21_28.182244", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-10-03T10-21-28.182244.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-10-03T10-21-28.182244.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_10_03T10_21_28.182244", "path": ["**/details_harness|hendrycksTest-management|5_2023-10-03T10-21-28.182244.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-10-03T10-21-28.182244.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_10_03T10_21_28.182244", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-10-03T10-21-28.182244.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-10-03T10-21-28.182244.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_10_03T10_21_28.182244", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-10-03T10-21-28.182244.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-medical_genetics|5_2023-10-03T10-21-28.182244.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_10_03T10_21_28.182244", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-10-03T10-21-28.182244.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-10-03T10-21-28.182244.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_10_03T10_21_28.182244", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-10-03T10-21-28.182244.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-10-03T10-21-28.182244.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_10_03T10_21_28.182244", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-03T10-21-28.182244.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-03T10-21-28.182244.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_10_03T10_21_28.182244", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-10-03T10-21-28.182244.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-10-03T10-21-28.182244.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_10_03T10_21_28.182244", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-10-03T10-21-28.182244.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-10-03T10-21-28.182244.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_10_03T10_21_28.182244", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-10-03T10-21-28.182244.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-10-03T10-21-28.182244.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_10_03T10_21_28.182244", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-10-03T10-21-28.182244.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-10-03T10-21-28.182244.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_10_03T10_21_28.182244", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-10-03T10-21-28.182244.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-10-03T10-21-28.182244.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_10_03T10_21_28.182244", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-10-03T10-21-28.182244.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-10-03T10-21-28.182244.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_10_03T10_21_28.182244", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-10-03T10-21-28.182244.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-10-03T10-21-28.182244.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": 
"2023_10_03T10_21_28.182244", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-10-03T10-21-28.182244.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-10-03T10-21-28.182244.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_10_03T10_21_28.182244", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-10-03T10-21-28.182244.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-10-03T10-21-28.182244.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_10_03T10_21_28.182244", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-10-03T10-21-28.182244.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-10-03T10-21-28.182244.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_10_03T10_21_28.182244", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-03T10-21-28.182244.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-03T10-21-28.182244.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_10_03T10_21_28.182244", "path": ["**/details_harness|hendrycksTest-virology|5_2023-10-03T10-21-28.182244.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-10-03T10-21-28.182244.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_10_03T10_21_28.182244", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-10-03T10-21-28.182244.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-10-03T10-21-28.182244.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_10_03T10_21_28.182244", "path": ["**/details_harness|truthfulqa:mc|0_2023-10-03T10-21-28.182244.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-10-03T10-21-28.182244.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_10_26T06_25_57.926077", "path": ["**/details_harness|winogrande|5_2023-10-26T06-25-57.926077.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-10-26T06-25-57.926077.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_10_03T10_21_28.182244", "path": ["results_2023-10-03T10-21-28.182244.parquet"]}, {"split": "2023_10_26T06_25_57.926077", "path": ["results_2023-10-26T06-25-57.926077.parquet"]}, {"split": "latest", "path": ["results_2023-10-26T06-25-57.926077.parquet"]}]}]}
2023-10-26T05:26:10+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of PY007/TinyLlama-1.1B-Chat-v0.1 ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model PY007/TinyLlama-1.1B-Chat-v0.1 on the Open LLM Leaderboard. The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-10-26T06:25:57.926077 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
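The "To load the details from a run" sentence above dangles because the code fence was dropped when this card's markdown was flattened. A minimal sketch of the intended snippet, assuming the leaderboard's standard `details_<org>__<model>` repository naming for this record (the exact repo id is not shown in the stripped text):

```python
from datasets import load_dataset

# Assumed repo id, following the Open LLM Leaderboard's
# details_<org>__<model> naming convention for this model.
data = load_dataset(
    "open-llm-leaderboard/details_PY007__TinyLlama-1.1B-Chat-v0.1",
    "harness_winogrande_5",  # one of the 64 per-task configurations
    split="train",  # "train" always points to the latest results
)
```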
[ "# Dataset Card for Evaluation run of PY007/TinyLlama-1.1B-Chat-v0.1", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model PY007/TinyLlama-1.1B-Chat-v0.1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-26T06:25:57.926077(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of PY007/TinyLlama-1.1B-Chat-v0.1", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model PY007/TinyLlama-1.1B-Chat-v0.1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-26T06:25:57.926077(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 25, 31, 173, 67, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of PY007/TinyLlama-1.1B-Chat-v0.1## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model PY007/TinyLlama-1.1B-Chat-v0.1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-10-26T06:25:57.926077(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
90e5827187c2199147fbe963ece313651d9bbbce
# Dataset Card for "embeddings_from_distilbert_class_heaps_and_eval1perc" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
johannes-garstenauer/embeddings_from_distilbert_class_heaps_and_eval1perc
[ "region:us" ]
2023-10-03T09:26:52+00:00
{"dataset_info": {"features": [{"name": "struct", "dtype": "string"}, {"name": "label", "dtype": "int64"}, {"name": "pred", "dtype": "int64"}, {"name": "last_hidden_state", "sequence": {"sequence": "float32"}}, {"name": "cls_layer_6", "sequence": "float32"}, {"name": "cls_layer_5", "sequence": "float32"}, {"name": "cls_layer_4", "sequence": "float32"}], "splits": [{"name": "train", "num_bytes": 4263758393, "num_examples": 2691}], "download_size": 4185738962, "dataset_size": 4263758393}}
2023-10-03T09:31:49+00:00
[]
[]
TAGS #region-us
# Dataset Card for "embeddings_from_distilbert_class_heaps_and_eval1perc" More Information needed
[ "# Dataset Card for \"embeddings_from_distilbert_class_heaps_and_eval1perc\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"embeddings_from_distilbert_class_heaps_and_eval1perc\"\n\nMore Information needed" ]
[ 6, 32 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"embeddings_from_distilbert_class_heaps_and_eval1perc\"\n\nMore Information needed" ]
a41e39ae191096a7c7788244eee78af94192d6d0
# Dataset Card for Evaluation run of TheBloke/Llama-2-7b-Chat-AWQ ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/TheBloke/Llama-2-7b-Chat-AWQ - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [TheBloke/Llama-2-7b-Chat-AWQ](https://huggingface.co/TheBloke/Llama-2-7b-Chat-AWQ) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_TheBloke__Llama-2-7b-Chat-AWQ", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-10-24T01:23:20.549960](https://huggingface.co/datasets/open-llm-leaderboard/details_TheBloke__Llama-2-7b-Chat-AWQ/blob/main/results_2023-10-24T01-23-20.549960.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "em": 0.0, "em_stderr": 0.0, "f1": 0.0, "f1_stderr": 0.0, "acc": 0.23756906077348067, "acc_stderr": 0.007017551441813875 }, "harness|drop|3": { "em": 0.0, "em_stderr": 0.0, "f1": 0.0, "f1_stderr": 0.0 }, "harness|gsm8k|5": { "acc": 0.0, "acc_stderr": 0.0 }, "harness|winogrande|5": { "acc": 0.47513812154696133, "acc_stderr": 0.01403510288362775 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
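The snippet in the card loads one task configuration; the "results" configuration named in the metadata below aggregates every metric for the run. A short sketch of reading it, with the config and split names taken from this record's own metadata:

```python
from datasets import load_dataset

# "results" and its timestamped splits are listed in this record's configs.
results = load_dataset(
    "open-llm-leaderboard/details_TheBloke__Llama-2-7b-Chat-AWQ",
    "results",
    split="latest",  # or "2023_10_24T01_23_20.549960" for that specific run
)
print(results[0])  # one row of aggregated metrics, e.g. winogrande accuracy
```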
open-llm-leaderboard/details_TheBloke__Llama-2-7b-Chat-AWQ
[ "region:us" ]
2023-10-03T09:54:45+00:00
{"pretty_name": "Evaluation run of TheBloke/Llama-2-7b-Chat-AWQ", "dataset_summary": "Dataset automatically created during the evaluation run of model [TheBloke/Llama-2-7b-Chat-AWQ](https://huggingface.co/TheBloke/Llama-2-7b-Chat-AWQ) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_TheBloke__Llama-2-7b-Chat-AWQ\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-10-24T01:23:20.549960](https://huggingface.co/datasets/open-llm-leaderboard/details_TheBloke__Llama-2-7b-Chat-AWQ/blob/main/results_2023-10-24T01-23-20.549960.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.0,\n \"em_stderr\": 0.0,\n \"f1\": 0.0,\n \"f1_stderr\": 0.0,\n \"acc\": 0.23756906077348067,\n \"acc_stderr\": 0.007017551441813875\n },\n \"harness|drop|3\": {\n \"em\": 0.0,\n \"em_stderr\": 0.0,\n \"f1\": 0.0,\n \"f1_stderr\": 0.0\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.0,\n \"acc_stderr\": 0.0\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.47513812154696133,\n \"acc_stderr\": 0.01403510288362775\n }\n}\n```", "repo_url": "https://huggingface.co/TheBloke/Llama-2-7b-Chat-AWQ", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_10_03T10_54_21.847398", "path": ["**/details_harness|arc:challenge|25_2023-10-03T10-54-21.847398.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-10-03T10-54-21.847398.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_10_24T01_23_20.549960", "path": ["**/details_harness|drop|3_2023-10-24T01-23-20.549960.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-10-24T01-23-20.549960.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_10_24T01_23_20.549960", "path": ["**/details_harness|gsm8k|5_2023-10-24T01-23-20.549960.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-10-24T01-23-20.549960.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_10_03T10_54_21.847398", "path": ["**/details_harness|hellaswag|10_2023-10-03T10-54-21.847398.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-10-03T10-54-21.847398.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_10_03T10_54_21.847398", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-03T10-54-21.847398.parquet", 
"**/details_harness|hendrycksTest-anatomy|5_2023-10-03T10-54-21.847398.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-10-03T10-54-21.847398.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-10-03T10-54-21.847398.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-03T10-54-21.847398.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-10-03T10-54-21.847398.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-10-03T10-54-21.847398.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-10-03T10-54-21.847398.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-10-03T10-54-21.847398.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-10-03T10-54-21.847398.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-10-03T10-54-21.847398.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-10-03T10-54-21.847398.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-03T10-54-21.847398.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-10-03T10-54-21.847398.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-03T10-54-21.847398.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-03T10-54-21.847398.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-10-03T10-54-21.847398.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-10-03T10-54-21.847398.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-10-03T10-54-21.847398.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-03T10-54-21.847398.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-03T10-54-21.847398.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-03T10-54-21.847398.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-10-03T10-54-21.847398.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-03T10-54-21.847398.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-03T10-54-21.847398.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-03T10-54-21.847398.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-03T10-54-21.847398.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-10-03T10-54-21.847398.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-03T10-54-21.847398.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-03T10-54-21.847398.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-03T10-54-21.847398.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-03T10-54-21.847398.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-10-03T10-54-21.847398.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-10-03T10-54-21.847398.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-10-03T10-54-21.847398.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-10-03T10-54-21.847398.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-03T10-54-21.847398.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-10-03T10-54-21.847398.parquet", "**/details_harness|hendrycksTest-management|5_2023-10-03T10-54-21.847398.parquet", 
"**/details_harness|hendrycksTest-marketing|5_2023-10-03T10-54-21.847398.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-10-03T10-54-21.847398.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-10-03T10-54-21.847398.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-10-03T10-54-21.847398.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-03T10-54-21.847398.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-10-03T10-54-21.847398.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-10-03T10-54-21.847398.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-10-03T10-54-21.847398.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-10-03T10-54-21.847398.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-10-03T10-54-21.847398.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-10-03T10-54-21.847398.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-10-03T10-54-21.847398.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-10-03T10-54-21.847398.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-10-03T10-54-21.847398.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-10-03T10-54-21.847398.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-03T10-54-21.847398.parquet", "**/details_harness|hendrycksTest-virology|5_2023-10-03T10-54-21.847398.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-10-03T10-54-21.847398.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-03T10-54-21.847398.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-10-03T10-54-21.847398.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-10-03T10-54-21.847398.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-10-03T10-54-21.847398.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-03T10-54-21.847398.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-10-03T10-54-21.847398.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-10-03T10-54-21.847398.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-10-03T10-54-21.847398.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-10-03T10-54-21.847398.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-10-03T10-54-21.847398.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-10-03T10-54-21.847398.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-10-03T10-54-21.847398.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-03T10-54-21.847398.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-10-03T10-54-21.847398.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-03T10-54-21.847398.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-03T10-54-21.847398.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-10-03T10-54-21.847398.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-10-03T10-54-21.847398.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-10-03T10-54-21.847398.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-03T10-54-21.847398.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-03T10-54-21.847398.parquet", 
"**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-03T10-54-21.847398.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-10-03T10-54-21.847398.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-03T10-54-21.847398.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-03T10-54-21.847398.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-03T10-54-21.847398.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-03T10-54-21.847398.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-10-03T10-54-21.847398.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-03T10-54-21.847398.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-03T10-54-21.847398.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-03T10-54-21.847398.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-03T10-54-21.847398.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-10-03T10-54-21.847398.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-10-03T10-54-21.847398.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-10-03T10-54-21.847398.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-10-03T10-54-21.847398.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-03T10-54-21.847398.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-10-03T10-54-21.847398.parquet", "**/details_harness|hendrycksTest-management|5_2023-10-03T10-54-21.847398.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-10-03T10-54-21.847398.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-10-03T10-54-21.847398.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-10-03T10-54-21.847398.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-10-03T10-54-21.847398.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-03T10-54-21.847398.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-10-03T10-54-21.847398.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-10-03T10-54-21.847398.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-10-03T10-54-21.847398.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-10-03T10-54-21.847398.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-10-03T10-54-21.847398.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-10-03T10-54-21.847398.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-10-03T10-54-21.847398.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-10-03T10-54-21.847398.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-10-03T10-54-21.847398.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-10-03T10-54-21.847398.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-03T10-54-21.847398.parquet", "**/details_harness|hendrycksTest-virology|5_2023-10-03T10-54-21.847398.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-10-03T10-54-21.847398.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_10_03T10_54_21.847398", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-03T10-54-21.847398.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-03T10-54-21.847398.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_10_03T10_54_21.847398", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-10-03T10-54-21.847398.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-10-03T10-54-21.847398.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_10_03T10_54_21.847398", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-10-03T10-54-21.847398.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-10-03T10-54-21.847398.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_10_03T10_54_21.847398", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-10-03T10-54-21.847398.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-10-03T10-54-21.847398.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_10_03T10_54_21.847398", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-03T10-54-21.847398.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-03T10-54-21.847398.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_10_03T10_54_21.847398", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-10-03T10-54-21.847398.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-10-03T10-54-21.847398.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_10_03T10_54_21.847398", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-10-03T10-54-21.847398.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-10-03T10-54-21.847398.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_10_03T10_54_21.847398", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-10-03T10-54-21.847398.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-10-03T10-54-21.847398.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_10_03T10_54_21.847398", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-10-03T10-54-21.847398.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-10-03T10-54-21.847398.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_10_03T10_54_21.847398", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-10-03T10-54-21.847398.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-10-03T10-54-21.847398.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_10_03T10_54_21.847398", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-10-03T10-54-21.847398.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-10-03T10-54-21.847398.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": 
[{"split": "2023_10_03T10_54_21.847398", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-10-03T10-54-21.847398.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-10-03T10-54-21.847398.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_10_03T10_54_21.847398", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-03T10-54-21.847398.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-03T10-54-21.847398.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_10_03T10_54_21.847398", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-10-03T10-54-21.847398.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-10-03T10-54-21.847398.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_10_03T10_54_21.847398", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-03T10-54-21.847398.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-03T10-54-21.847398.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_10_03T10_54_21.847398", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-03T10-54-21.847398.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-03T10-54-21.847398.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_10_03T10_54_21.847398", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-10-03T10-54-21.847398.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-10-03T10-54-21.847398.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_10_03T10_54_21.847398", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-10-03T10-54-21.847398.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-10-03T10-54-21.847398.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_10_03T10_54_21.847398", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-10-03T10-54-21.847398.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-10-03T10-54-21.847398.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_10_03T10_54_21.847398", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-03T10-54-21.847398.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-03T10-54-21.847398.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_10_03T10_54_21.847398", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-03T10-54-21.847398.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-03T10-54-21.847398.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_10_03T10_54_21.847398", "path": 
["**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-03T10-54-21.847398.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-03T10-54-21.847398.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_10_03T10_54_21.847398", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-10-03T10-54-21.847398.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-10-03T10-54-21.847398.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_10_03T10_54_21.847398", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-03T10-54-21.847398.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-03T10-54-21.847398.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_10_03T10_54_21.847398", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-03T10-54-21.847398.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-03T10-54-21.847398.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_10_03T10_54_21.847398", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-03T10-54-21.847398.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-03T10-54-21.847398.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_10_03T10_54_21.847398", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-03T10-54-21.847398.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-03T10-54-21.847398.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_10_03T10_54_21.847398", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-10-03T10-54-21.847398.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-10-03T10-54-21.847398.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_10_03T10_54_21.847398", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-03T10-54-21.847398.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-03T10-54-21.847398.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_10_03T10_54_21.847398", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-03T10-54-21.847398.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-03T10-54-21.847398.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_10_03T10_54_21.847398", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-03T10-54-21.847398.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-03T10-54-21.847398.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_10_03T10_54_21.847398", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-03T10-54-21.847398.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-03T10-54-21.847398.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_10_03T10_54_21.847398", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-10-03T10-54-21.847398.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-10-03T10-54-21.847398.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_10_03T10_54_21.847398", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-10-03T10-54-21.847398.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-10-03T10-54-21.847398.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_10_03T10_54_21.847398", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-10-03T10-54-21.847398.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-10-03T10-54-21.847398.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_10_03T10_54_21.847398", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-10-03T10-54-21.847398.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-10-03T10-54-21.847398.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_10_03T10_54_21.847398", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-03T10-54-21.847398.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-03T10-54-21.847398.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_10_03T10_54_21.847398", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-10-03T10-54-21.847398.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-10-03T10-54-21.847398.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_10_03T10_54_21.847398", "path": ["**/details_harness|hendrycksTest-management|5_2023-10-03T10-54-21.847398.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-10-03T10-54-21.847398.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_10_03T10_54_21.847398", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-10-03T10-54-21.847398.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-10-03T10-54-21.847398.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_10_03T10_54_21.847398", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-10-03T10-54-21.847398.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-10-03T10-54-21.847398.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_10_03T10_54_21.847398", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-10-03T10-54-21.847398.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-10-03T10-54-21.847398.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_10_03T10_54_21.847398", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-10-03T10-54-21.847398.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-10-03T10-54-21.847398.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_10_03T10_54_21.847398", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-03T10-54-21.847398.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-03T10-54-21.847398.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_10_03T10_54_21.847398", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-10-03T10-54-21.847398.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-10-03T10-54-21.847398.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_10_03T10_54_21.847398", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-10-03T10-54-21.847398.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-10-03T10-54-21.847398.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_10_03T10_54_21.847398", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-10-03T10-54-21.847398.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-10-03T10-54-21.847398.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_10_03T10_54_21.847398", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-10-03T10-54-21.847398.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-10-03T10-54-21.847398.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_10_03T10_54_21.847398", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-10-03T10-54-21.847398.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-10-03T10-54-21.847398.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_10_03T10_54_21.847398", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-10-03T10-54-21.847398.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-10-03T10-54-21.847398.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_10_03T10_54_21.847398", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-10-03T10-54-21.847398.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-10-03T10-54-21.847398.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_10_03T10_54_21.847398", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-10-03T10-54-21.847398.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-10-03T10-54-21.847398.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_10_03T10_54_21.847398", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-10-03T10-54-21.847398.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-10-03T10-54-21.847398.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_10_03T10_54_21.847398", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-10-03T10-54-21.847398.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-10-03T10-54-21.847398.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_10_03T10_54_21.847398", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-03T10-54-21.847398.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-03T10-54-21.847398.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_10_03T10_54_21.847398", "path": ["**/details_harness|hendrycksTest-virology|5_2023-10-03T10-54-21.847398.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-10-03T10-54-21.847398.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_10_03T10_54_21.847398", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-10-03T10-54-21.847398.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-10-03T10-54-21.847398.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_10_03T10_54_21.847398", "path": ["**/details_harness|truthfulqa:mc|0_2023-10-03T10-54-21.847398.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-10-03T10-54-21.847398.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_10_24T01_23_20.549960", "path": ["**/details_harness|winogrande|5_2023-10-24T01-23-20.549960.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-10-24T01-23-20.549960.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_10_03T10_54_21.847398", "path": ["results_2023-10-03T10-54-21.847398.parquet"]}, {"split": "2023_10_24T01_23_20.549960", "path": ["results_2023-10-24T01-23-20.549960.parquet"]}, {"split": "latest", "path": ["results_2023-10-24T01-23-20.549960.parquet"]}]}]}
2023-10-24T00:23:33+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of TheBloke/Llama-2-7b-Chat-AWQ ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model TheBloke/Llama-2-7b-Chat-AWQ on the Open LLM Leaderboard. The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-10-24T01:23:20.549960 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
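In this tags-stripped copy the code fences were dropped; for readability, the load snippet from the full card earlier in this record is reproduced here:

```python
from datasets import load_dataset

# Repo id and config name exactly as given in the full card above.
data = load_dataset(
    "open-llm-leaderboard/details_TheBloke__Llama-2-7b-Chat-AWQ",
    "harness_winogrande_5",
    split="train",
)
```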
[ "# Dataset Card for Evaluation run of TheBloke/Llama-2-7b-Chat-AWQ", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model TheBloke/Llama-2-7b-Chat-AWQ on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-24T01:23:20.549960(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of TheBloke/Llama-2-7b-Chat-AWQ", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model TheBloke/Llama-2-7b-Chat-AWQ on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-24T01:23:20.549960(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 24, 31, 172, 66, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of TheBloke/Llama-2-7b-Chat-AWQ## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model TheBloke/Llama-2-7b-Chat-AWQ on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-10-24T01:23:20.549960(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
68cb81d1d7f4fdf8e22e8c18560a62d4f8a07be8
# Dataset Card for Evaluation run of LTC-AI-Labs/L2-7b-Base-WVG-Uncensored ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/LTC-AI-Labs/L2-7b-Base-WVG-Uncensored - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [LTC-AI-Labs/L2-7b-Base-WVG-Uncensored](https://huggingface.co/LTC-AI-Labs/L2-7b-Base-WVG-Uncensored) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_LTC-AI-Labs__L2-7b-Base-WVG-Uncensored", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-10-23T14:27:39.647282](https://huggingface.co/datasets/open-llm-leaderboard/details_LTC-AI-Labs__L2-7b-Base-WVG-Uncensored/blob/main/results_2023-10-23T14-27-39.647282.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "em": 0.001363255033557047, "em_stderr": 0.0003778609196461104, "f1": 0.05628670302013431, "f1_stderr": 0.0013136825712446111, "acc": 0.4109445880229037, "acc_stderr": 0.00959942926369336 }, "harness|drop|3": { "em": 0.001363255033557047, "em_stderr": 0.0003778609196461104, "f1": 0.05628670302013431, "f1_stderr": 0.0013136825712446111 }, "harness|gsm8k|5": { "acc": 0.07050796057619409, "acc_stderr": 0.0070515438139836135 }, "harness|winogrande|5": { "acc": 0.7513812154696132, "acc_stderr": 0.012147314713403107 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
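The card's snippet targets the "train"/"latest" split; individual runs can also be addressed by their timestamped split. A brief sketch, with the config name and split string copied from this record's metadata (whether such dotted split names load cleanly is an assumption here):

```python
from datasets import load_dataset

# Both the config name and the timestamped split appear verbatim
# in this record's metadata block.
gsm8k_details = load_dataset(
    "open-llm-leaderboard/details_LTC-AI-Labs__L2-7b-Base-WVG-Uncensored",
    "harness_gsm8k_5",
    split="2023_10_23T14_27_39.647282",
)
```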
open-llm-leaderboard/details_LTC-AI-Labs__L2-7b-Base-WVG-Uncensored
[ "region:us" ]
2023-10-03T09:59:08+00:00
{"pretty_name": "Evaluation run of LTC-AI-Labs/L2-7b-Base-WVG-Uncensored", "dataset_summary": "Dataset automatically created during the evaluation run of model [LTC-AI-Labs/L2-7b-Base-WVG-Uncensored](https://huggingface.co/LTC-AI-Labs/L2-7b-Base-WVG-Uncensored) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_LTC-AI-Labs__L2-7b-Base-WVG-Uncensored\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-10-23T14:27:39.647282](https://huggingface.co/datasets/open-llm-leaderboard/details_LTC-AI-Labs__L2-7b-Base-WVG-Uncensored/blob/main/results_2023-10-23T14-27-39.647282.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.001363255033557047,\n \"em_stderr\": 0.0003778609196461104,\n \"f1\": 0.05628670302013431,\n \"f1_stderr\": 0.0013136825712446111,\n \"acc\": 0.4109445880229037,\n \"acc_stderr\": 0.00959942926369336\n },\n \"harness|drop|3\": {\n \"em\": 0.001363255033557047,\n \"em_stderr\": 0.0003778609196461104,\n \"f1\": 0.05628670302013431,\n \"f1_stderr\": 0.0013136825712446111\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.07050796057619409,\n \"acc_stderr\": 0.0070515438139836135\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7513812154696132,\n \"acc_stderr\": 0.012147314713403107\n }\n}\n```", "repo_url": "https://huggingface.co/LTC-AI-Labs/L2-7b-Base-WVG-Uncensored", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_10_03T10_58_44.594405", "path": ["**/details_harness|arc:challenge|25_2023-10-03T10-58-44.594405.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-10-03T10-58-44.594405.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_10_23T14_27_39.647282", "path": ["**/details_harness|drop|3_2023-10-23T14-27-39.647282.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-10-23T14-27-39.647282.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_10_23T14_27_39.647282", "path": ["**/details_harness|gsm8k|5_2023-10-23T14-27-39.647282.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-10-23T14-27-39.647282.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_10_03T10_58_44.594405", "path": ["**/details_harness|hellaswag|10_2023-10-03T10-58-44.594405.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hellaswag|10_2023-10-03T10-58-44.594405.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_10_03T10_58_44.594405", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-03T10-58-44.594405.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-10-03T10-58-44.594405.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-10-03T10-58-44.594405.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-10-03T10-58-44.594405.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-03T10-58-44.594405.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-10-03T10-58-44.594405.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-10-03T10-58-44.594405.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-10-03T10-58-44.594405.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-10-03T10-58-44.594405.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-10-03T10-58-44.594405.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-10-03T10-58-44.594405.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-10-03T10-58-44.594405.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-03T10-58-44.594405.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-10-03T10-58-44.594405.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-03T10-58-44.594405.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-03T10-58-44.594405.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-10-03T10-58-44.594405.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-10-03T10-58-44.594405.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-10-03T10-58-44.594405.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-03T10-58-44.594405.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-03T10-58-44.594405.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-03T10-58-44.594405.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-10-03T10-58-44.594405.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-03T10-58-44.594405.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-03T10-58-44.594405.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-03T10-58-44.594405.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-03T10-58-44.594405.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-10-03T10-58-44.594405.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-03T10-58-44.594405.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-03T10-58-44.594405.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-03T10-58-44.594405.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-03T10-58-44.594405.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-10-03T10-58-44.594405.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-10-03T10-58-44.594405.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-10-03T10-58-44.594405.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-10-03T10-58-44.594405.parquet", 
"**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-03T10-58-44.594405.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-10-03T10-58-44.594405.parquet", "**/details_harness|hendrycksTest-management|5_2023-10-03T10-58-44.594405.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-10-03T10-58-44.594405.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-10-03T10-58-44.594405.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-10-03T10-58-44.594405.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-10-03T10-58-44.594405.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-03T10-58-44.594405.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-10-03T10-58-44.594405.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-10-03T10-58-44.594405.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-10-03T10-58-44.594405.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-10-03T10-58-44.594405.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-10-03T10-58-44.594405.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-10-03T10-58-44.594405.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-10-03T10-58-44.594405.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-10-03T10-58-44.594405.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-10-03T10-58-44.594405.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-10-03T10-58-44.594405.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-03T10-58-44.594405.parquet", "**/details_harness|hendrycksTest-virology|5_2023-10-03T10-58-44.594405.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-10-03T10-58-44.594405.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-03T10-58-44.594405.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-10-03T10-58-44.594405.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-10-03T10-58-44.594405.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-10-03T10-58-44.594405.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-03T10-58-44.594405.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-10-03T10-58-44.594405.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-10-03T10-58-44.594405.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-10-03T10-58-44.594405.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-10-03T10-58-44.594405.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-10-03T10-58-44.594405.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-10-03T10-58-44.594405.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-10-03T10-58-44.594405.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-03T10-58-44.594405.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-10-03T10-58-44.594405.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-03T10-58-44.594405.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-03T10-58-44.594405.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-10-03T10-58-44.594405.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-10-03T10-58-44.594405.parquet", 
"**/details_harness|hendrycksTest-high_school_biology|5_2023-10-03T10-58-44.594405.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-03T10-58-44.594405.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-03T10-58-44.594405.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-03T10-58-44.594405.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-10-03T10-58-44.594405.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-03T10-58-44.594405.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-03T10-58-44.594405.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-03T10-58-44.594405.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-03T10-58-44.594405.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-10-03T10-58-44.594405.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-03T10-58-44.594405.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-03T10-58-44.594405.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-03T10-58-44.594405.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-03T10-58-44.594405.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-10-03T10-58-44.594405.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-10-03T10-58-44.594405.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-10-03T10-58-44.594405.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-10-03T10-58-44.594405.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-03T10-58-44.594405.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-10-03T10-58-44.594405.parquet", "**/details_harness|hendrycksTest-management|5_2023-10-03T10-58-44.594405.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-10-03T10-58-44.594405.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-10-03T10-58-44.594405.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-10-03T10-58-44.594405.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-10-03T10-58-44.594405.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-03T10-58-44.594405.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-10-03T10-58-44.594405.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-10-03T10-58-44.594405.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-10-03T10-58-44.594405.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-10-03T10-58-44.594405.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-10-03T10-58-44.594405.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-10-03T10-58-44.594405.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-10-03T10-58-44.594405.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-10-03T10-58-44.594405.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-10-03T10-58-44.594405.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-10-03T10-58-44.594405.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-03T10-58-44.594405.parquet", "**/details_harness|hendrycksTest-virology|5_2023-10-03T10-58-44.594405.parquet", 
"**/details_harness|hendrycksTest-world_religions|5_2023-10-03T10-58-44.594405.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_10_03T10_58_44.594405", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-03T10-58-44.594405.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-03T10-58-44.594405.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_10_03T10_58_44.594405", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-10-03T10-58-44.594405.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-10-03T10-58-44.594405.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_10_03T10_58_44.594405", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-10-03T10-58-44.594405.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-10-03T10-58-44.594405.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_10_03T10_58_44.594405", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-10-03T10-58-44.594405.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-10-03T10-58-44.594405.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_10_03T10_58_44.594405", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-03T10-58-44.594405.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-03T10-58-44.594405.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_10_03T10_58_44.594405", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-10-03T10-58-44.594405.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-10-03T10-58-44.594405.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_10_03T10_58_44.594405", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-10-03T10-58-44.594405.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-10-03T10-58-44.594405.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_10_03T10_58_44.594405", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-10-03T10-58-44.594405.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-10-03T10-58-44.594405.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_10_03T10_58_44.594405", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-10-03T10-58-44.594405.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-10-03T10-58-44.594405.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_10_03T10_58_44.594405", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-10-03T10-58-44.594405.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-10-03T10-58-44.594405.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": 
"2023_10_03T10_58_44.594405", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-10-03T10-58-44.594405.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-10-03T10-58-44.594405.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_10_03T10_58_44.594405", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-10-03T10-58-44.594405.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-10-03T10-58-44.594405.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_10_03T10_58_44.594405", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-03T10-58-44.594405.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-03T10-58-44.594405.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_10_03T10_58_44.594405", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-10-03T10-58-44.594405.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-10-03T10-58-44.594405.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_10_03T10_58_44.594405", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-03T10-58-44.594405.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-03T10-58-44.594405.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_10_03T10_58_44.594405", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-03T10-58-44.594405.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-03T10-58-44.594405.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_10_03T10_58_44.594405", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-10-03T10-58-44.594405.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-10-03T10-58-44.594405.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_10_03T10_58_44.594405", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-10-03T10-58-44.594405.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-10-03T10-58-44.594405.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_10_03T10_58_44.594405", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-10-03T10-58-44.594405.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-10-03T10-58-44.594405.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_10_03T10_58_44.594405", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-03T10-58-44.594405.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-03T10-58-44.594405.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_10_03T10_58_44.594405", "path": 
["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-03T10-58-44.594405.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-03T10-58-44.594405.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_10_03T10_58_44.594405", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-03T10-58-44.594405.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-03T10-58-44.594405.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_10_03T10_58_44.594405", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-10-03T10-58-44.594405.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-10-03T10-58-44.594405.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_10_03T10_58_44.594405", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-03T10-58-44.594405.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-03T10-58-44.594405.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_10_03T10_58_44.594405", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-03T10-58-44.594405.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-03T10-58-44.594405.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_10_03T10_58_44.594405", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-03T10-58-44.594405.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-03T10-58-44.594405.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_10_03T10_58_44.594405", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-03T10-58-44.594405.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-03T10-58-44.594405.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_10_03T10_58_44.594405", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-10-03T10-58-44.594405.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-10-03T10-58-44.594405.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_10_03T10_58_44.594405", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-03T10-58-44.594405.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-03T10-58-44.594405.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_10_03T10_58_44.594405", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-03T10-58-44.594405.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-03T10-58-44.594405.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_10_03T10_58_44.594405", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-03T10-58-44.594405.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-03T10-58-44.594405.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_10_03T10_58_44.594405", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-03T10-58-44.594405.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-03T10-58-44.594405.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_10_03T10_58_44.594405", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-10-03T10-58-44.594405.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-10-03T10-58-44.594405.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_10_03T10_58_44.594405", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-10-03T10-58-44.594405.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-10-03T10-58-44.594405.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_10_03T10_58_44.594405", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-10-03T10-58-44.594405.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-10-03T10-58-44.594405.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_10_03T10_58_44.594405", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-10-03T10-58-44.594405.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-10-03T10-58-44.594405.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_10_03T10_58_44.594405", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-03T10-58-44.594405.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-03T10-58-44.594405.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_10_03T10_58_44.594405", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-10-03T10-58-44.594405.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-10-03T10-58-44.594405.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_10_03T10_58_44.594405", "path": ["**/details_harness|hendrycksTest-management|5_2023-10-03T10-58-44.594405.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-10-03T10-58-44.594405.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_10_03T10_58_44.594405", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-10-03T10-58-44.594405.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-10-03T10-58-44.594405.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_10_03T10_58_44.594405", "path": 
["**/details_harness|hendrycksTest-medical_genetics|5_2023-10-03T10-58-44.594405.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-10-03T10-58-44.594405.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_10_03T10_58_44.594405", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-10-03T10-58-44.594405.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-10-03T10-58-44.594405.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_10_03T10_58_44.594405", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-10-03T10-58-44.594405.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-10-03T10-58-44.594405.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_10_03T10_58_44.594405", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-03T10-58-44.594405.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-03T10-58-44.594405.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_10_03T10_58_44.594405", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-10-03T10-58-44.594405.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-10-03T10-58-44.594405.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_10_03T10_58_44.594405", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-10-03T10-58-44.594405.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-10-03T10-58-44.594405.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_10_03T10_58_44.594405", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-10-03T10-58-44.594405.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-10-03T10-58-44.594405.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_10_03T10_58_44.594405", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-10-03T10-58-44.594405.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-10-03T10-58-44.594405.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_10_03T10_58_44.594405", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-10-03T10-58-44.594405.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-10-03T10-58-44.594405.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_10_03T10_58_44.594405", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-10-03T10-58-44.594405.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-10-03T10-58-44.594405.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_10_03T10_58_44.594405", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-10-03T10-58-44.594405.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_psychology|5_2023-10-03T10-58-44.594405.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_10_03T10_58_44.594405", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-10-03T10-58-44.594405.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-10-03T10-58-44.594405.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_10_03T10_58_44.594405", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-10-03T10-58-44.594405.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-10-03T10-58-44.594405.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_10_03T10_58_44.594405", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-10-03T10-58-44.594405.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-10-03T10-58-44.594405.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_10_03T10_58_44.594405", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-03T10-58-44.594405.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-03T10-58-44.594405.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_10_03T10_58_44.594405", "path": ["**/details_harness|hendrycksTest-virology|5_2023-10-03T10-58-44.594405.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-10-03T10-58-44.594405.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_10_03T10_58_44.594405", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-10-03T10-58-44.594405.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-10-03T10-58-44.594405.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_10_03T10_58_44.594405", "path": ["**/details_harness|truthfulqa:mc|0_2023-10-03T10-58-44.594405.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-10-03T10-58-44.594405.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_10_23T14_27_39.647282", "path": ["**/details_harness|winogrande|5_2023-10-23T14-27-39.647282.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-10-23T14-27-39.647282.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_10_03T10_58_44.594405", "path": ["results_2023-10-03T10-58-44.594405.parquet"]}, {"split": "2023_10_23T14_27_39.647282", "path": ["results_2023-10-23T14-27-39.647282.parquet"]}, {"split": "latest", "path": ["results_2023-10-23T14-27-39.647282.parquet"]}]}]}
2023-10-23T13:27:52+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of LTC-AI-Labs/L2-7b-Base-WVG-Uncensored

## Dataset Description

- Homepage: 
- Repository: URL
- Paper: 
- Leaderboard: URL
- Point of Contact: clementine@URL

### Dataset Summary

Dataset automatically created during the evaluation run of model LTC-AI-Labs/L2-7b-Base-WVG-Uncensored on the Open LLM Leaderboard.

The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).

To load the details from a run, you can for instance do the following:

## Latest results

These are the latest results from run 2023-10-23T14:27:39.647282 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval):

### Supported Tasks and Leaderboards

### Languages

## Dataset Structure

### Data Instances

### Data Fields

### Data Splits

## Dataset Creation

### Curation Rationale

### Source Data

#### Initial Data Collection and Normalization

#### Who are the source language producers?

### Annotations

#### Annotation process

#### Who are the annotators?

### Personal and Sensitive Information

## Considerations for Using the Data

### Social Impact of Dataset

### Discussion of Biases

### Other Known Limitations

## Additional Information

### Dataset Curators

### Licensing Information

### Contributions
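The sentence "do the following:" above ends where a fenced code block was stripped from this plain-text rendering; the snippet from the full card, reproduced for reference:

```python
from datasets import load_dataset

# Per-task details for the latest run of LTC-AI-Labs/L2-7b-Base-WVG-Uncensored.
data = load_dataset(
    "open-llm-leaderboard/details_LTC-AI-Labs__L2-7b-Base-WVG-Uncensored",
    "harness_winogrande_5",
    split="train",
)
```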
[ "# Dataset Card for Evaluation run of LTC-AI-Labs/L2-7b-Base-WVG-Uncensored", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model LTC-AI-Labs/L2-7b-Base-WVG-Uncensored on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-23T14:27:39.647282(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of LTC-AI-Labs/L2-7b-Base-WVG-Uncensored", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model LTC-AI-Labs/L2-7b-Base-WVG-Uncensored on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-23T14:27:39.647282(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 31, 31, 179, 67, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of LTC-AI-Labs/L2-7b-Base-WVG-Uncensored## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model LTC-AI-Labs/L2-7b-Base-WVG-Uncensored on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-10-23T14:27:39.647282(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
ad6f614a844298a35a9924c3637c9a3769b6370a
# Dataset Card for Evaluation run of stabilityai/stablelm-3b-4e1t

## Dataset Description

- **Homepage:** 
- **Repository:** https://huggingface.co/stabilityai/stablelm-3b-4e1t
- **Paper:** 
- **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
- **Point of Contact:** [email protected]

### Dataset Summary

Dataset automatically created during the evaluation run of model [stabilityai/stablelm-3b-4e1t](https://huggingface.co/stabilityai/stablelm-3b-4e1t) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).

The dataset is composed of 3 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).

To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_stabilityai__stablelm-3b-4e1t_public",
	"harness_winogrande_5",
	split="train")
```

## Latest results

These are the [latest results from run 2023-11-08T16:27:49.205374](https://huggingface.co/datasets/open-llm-leaderboard/details_stabilityai__stablelm-3b-4e1t_public/blob/main/results_2023-11-08T16-27-49.205374.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval):

```python
{
    "all": {
        "em": 0.0016778523489932886,
        "em_stderr": 0.00041913301788267703,
        "f1": 0.053592701342281994,
        "f1_stderr": 0.001271488426848693,
        "acc": 0.3726382606707983,
        "acc_stderr": 0.008837083686710946
    },
    "harness|drop|3": {
        "em": 0.0016778523489932886,
        "em_stderr": 0.00041913301788267703,
        "f1": 0.053592701342281994,
        "f1_stderr": 0.001271488426848693
    },
    "harness|gsm8k|5": {
        "acc": 0.03335860500379075,
        "acc_stderr": 0.004946282649173774
    },
    "harness|winogrande|5": {
        "acc": 0.7119179163378059,
        "acc_stderr": 0.012727884724248116
    }
}
```

### Supported Tasks and Leaderboards

[More Information Needed]

### Languages

[More Information Needed]

## Dataset Structure

### Data Instances

[More Information Needed]

### Data Fields

[More Information Needed]

### Data Splits

[More Information Needed]

## Dataset Creation

### Curation Rationale

[More Information Needed]

### Source Data

#### Initial Data Collection and Normalization

[More Information Needed]

#### Who are the source language producers?

[More Information Needed]

### Annotations

#### Annotation process

[More Information Needed]

#### Who are the annotators?

[More Information Needed]

### Personal and Sensitive Information

[More Information Needed]

## Considerations for Using the Data

### Social Impact of Dataset

[More Information Needed]

### Discussion of Biases

[More Information Needed]

### Other Known Limitations

[More Information Needed]

## Additional Information

### Dataset Curators

[More Information Needed]

### Licensing Information

[More Information Needed]

### Citation Information

[More Information Needed]

### Contributions

[More Information Needed]
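The card says this repository exposes 3 task configurations plus "results". A minimal sketch of enumerating them programmatically, using the datasets library's config-discovery helper rather than hard-coding the names:

```python
from datasets import get_dataset_config_names

# Lists the available configurations of the detail repository,
# e.g. the harness_* task configs and the aggregated "results" config.
configs = get_dataset_config_names(
    "open-llm-leaderboard/details_stabilityai__stablelm-3b-4e1t_public"
)
print(configs)
```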
open-llm-leaderboard/details_stabilityai__stablelm-3b-4e1t
[ "region:us" ]
2023-10-03T10:07:42+00:00
{"pretty_name": "Evaluation run of stabilityai/stablelm-3b-4e1t", "dataset_summary": "Dataset automatically created during the evaluation run of model [stabilityai/stablelm-3b-4e1t](https://huggingface.co/stabilityai/stablelm-3b-4e1t) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 3 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_stabilityai__stablelm-3b-4e1t_public\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-11-08T16:27:49.205374](https://huggingface.co/datasets/open-llm-leaderboard/details_stabilityai__stablelm-3b-4e1t_public/blob/main/results_2023-11-08T16-27-49.205374.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.0016778523489932886,\n \"em_stderr\": 0.00041913301788267703,\n \"f1\": 0.053592701342281994,\n \"f1_stderr\": 0.001271488426848693,\n \"acc\": 0.3726382606707983,\n \"acc_stderr\": 0.008837083686710946\n },\n \"harness|drop|3\": {\n \"em\": 0.0016778523489932886,\n \"em_stderr\": 0.00041913301788267703,\n \"f1\": 0.053592701342281994,\n \"f1_stderr\": 0.001271488426848693\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.03335860500379075,\n \"acc_stderr\": 0.004946282649173774\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7119179163378059,\n \"acc_stderr\": 0.012727884724248116\n }\n}\n```", "repo_url": "https://huggingface.co/stabilityai/stablelm-3b-4e1t", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_drop_3", "data_files": [{"split": "2023_11_08T16_27_49.205374", "path": ["**/details_harness|drop|3_2023-11-08T16-27-49.205374.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-11-08T16-27-49.205374.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_11_08T16_27_49.205374", "path": ["**/details_harness|gsm8k|5_2023-11-08T16-27-49.205374.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-11-08T16-27-49.205374.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_11_08T16_27_49.205374", "path": ["**/details_harness|winogrande|5_2023-11-08T16-27-49.205374.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-11-08T16-27-49.205374.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_11_08T16_27_49.205374", "path": ["results_2023-11-08T16-27-49.205374.parquet"]}, {"split": "latest", "path": ["results_2023-11-08T16-27-49.205374.parquet"]}]}]}
2023-12-01T14:50:35+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of stabilityai/stablelm-3b-4e1t

## Dataset Description

- Homepage: 
- Repository: URL
- Paper: 
- Leaderboard: URL
- Point of Contact: clementine@URL

### Dataset Summary

Dataset automatically created during the evaluation run of model stabilityai/stablelm-3b-4e1t on the Open LLM Leaderboard.

The dataset is composed of 3 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).

To load the details from a run, you can for instance do the following:

## Latest results

These are the latest results from run 2023-11-08T16:27:49.205374 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval):

### Supported Tasks and Leaderboards

### Languages

## Dataset Structure

### Data Instances

### Data Fields

### Data Splits

## Dataset Creation

### Curation Rationale

### Source Data

#### Initial Data Collection and Normalization

#### Who are the source language producers?

### Annotations

#### Annotation process

#### Who are the annotators?

### Personal and Sensitive Information

## Considerations for Using the Data

### Social Impact of Dataset

### Discussion of Biases

### Other Known Limitations

## Additional Information

### Dataset Curators

### Licensing Information

### Contributions
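As above, the fenced code block after "do the following:" was stripped in this plain-text rendering; the snippet from the full card, reproduced for reference:

```python
from datasets import load_dataset

# Per-task details for the latest run of stabilityai/stablelm-3b-4e1t.
data = load_dataset(
    "open-llm-leaderboard/details_stabilityai__stablelm-3b-4e1t_public",
    "harness_winogrande_5",
    split="train",
)
```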
[ "# Dataset Card for Evaluation run of stabilityai/stablelm-3b-4e1t", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model stabilityai/stablelm-3b-4e1t on the Open LLM Leaderboard.\n\nThe dataset is composed of 3 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-11-08T16:27:49.205374(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of stabilityai/stablelm-3b-4e1t", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model stabilityai/stablelm-3b-4e1t on the Open LLM Leaderboard.\n\nThe dataset is composed of 3 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-11-08T16:27:49.205374(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 22, 31, 171, 66, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of stabilityai/stablelm-3b-4e1t## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model stabilityai/stablelm-3b-4e1t on the Open LLM Leaderboard.\n\nThe dataset is composed of 3 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-11-08T16:27:49.205374(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
0488881ea50c0403ddb07276e8437156fa71af57
# Dataset Card for Evaluation run of YeungNLP/firefly-llama2-13b-chat

## Dataset Description

- **Homepage:** 
- **Repository:** https://huggingface.co/YeungNLP/firefly-llama2-13b-chat
- **Paper:** 
- **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
- **Point of Contact:** [email protected]

### Dataset Summary

Dataset automatically created during the evaluation run of model [YeungNLP/firefly-llama2-13b-chat](https://huggingface.co/YeungNLP/firefly-llama2-13b-chat) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).

The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).

To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_YeungNLP__firefly-llama2-13b-chat",
	"harness_winogrande_5",
	split="train")
```

## Latest results

These are the [latest results from run 2023-10-23T07:10:32.244484](https://huggingface.co/datasets/open-llm-leaderboard/details_YeungNLP__firefly-llama2-13b-chat/blob/main/results_2023-10-23T07-10-32.244484.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval):

```python
{
    "all": {
        "em": 0.11954697986577181,
        "em_stderr": 0.003322477507478937,
        "f1": 0.21568372483221465,
        "f1_stderr": 0.0035544327528378426,
        "acc": 0.4229972001701799,
        "acc_stderr": 0.010210734400170219
    },
    "harness|drop|3": {
        "em": 0.11954697986577181,
        "em_stderr": 0.003322477507478937,
        "f1": 0.21568372483221465,
        "f1_stderr": 0.0035544327528378426
    },
    "harness|gsm8k|5": {
        "acc": 0.09855951478392722,
        "acc_stderr": 0.008210320350946333
    },
    "harness|winogrande|5": {
        "acc": 0.7474348855564326,
        "acc_stderr": 0.012211148449394105
    }
}
```

### Supported Tasks and Leaderboards

[More Information Needed]

### Languages

[More Information Needed]

## Dataset Structure

### Data Instances

[More Information Needed]

### Data Fields

[More Information Needed]

### Data Splits

[More Information Needed]

## Dataset Creation

### Curation Rationale

[More Information Needed]

### Source Data

#### Initial Data Collection and Normalization

[More Information Needed]

#### Who are the source language producers?

[More Information Needed]

### Annotations

#### Annotation process

[More Information Needed]

#### Who are the annotators?

[More Information Needed]

### Personal and Sensitive Information

[More Information Needed]

## Considerations for Using the Data

### Social Impact of Dataset

[More Information Needed]

### Discussion of Biases

[More Information Needed]

### Other Known Limitations

[More Information Needed]

## Additional Information

### Dataset Curators

[More Information Needed]

### Licensing Information

[More Information Needed]

### Citation Information

[More Information Needed]

### Contributions

[More Information Needed]
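The card above notes that each run is stored as a timestamp-named split. A minimal sketch of loading a specific run rather than the latest one, with the split name taken from this record's "configs" metadata:

```python
from datasets import load_dataset

# Per-run splits are named after the run timestamp; "latest" always
# aliases the most recent of them.
data = load_dataset(
    "open-llm-leaderboard/details_YeungNLP__firefly-llama2-13b-chat",
    "harness_gsm8k_5",
    split="2023_10_23T07_10_32.244484",
)
print(data)
```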
open-llm-leaderboard/details_YeungNLP__firefly-llama2-13b-chat
[ "region:us" ]
2023-10-03T10:22:34+00:00
{"pretty_name": "Evaluation run of YeungNLP/firefly-llama2-13b-chat", "dataset_summary": "Dataset automatically created during the evaluation run of model [YeungNLP/firefly-llama2-13b-chat](https://huggingface.co/YeungNLP/firefly-llama2-13b-chat) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_YeungNLP__firefly-llama2-13b-chat\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-10-23T07:10:32.244484](https://huggingface.co/datasets/open-llm-leaderboard/details_YeungNLP__firefly-llama2-13b-chat/blob/main/results_2023-10-23T07-10-32.244484.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.11954697986577181,\n \"em_stderr\": 0.003322477507478937,\n \"f1\": 0.21568372483221465,\n \"f1_stderr\": 0.0035544327528378426,\n \"acc\": 0.4229972001701799,\n \"acc_stderr\": 0.010210734400170219\n },\n \"harness|drop|3\": {\n \"em\": 0.11954697986577181,\n \"em_stderr\": 0.003322477507478937,\n \"f1\": 0.21568372483221465,\n \"f1_stderr\": 0.0035544327528378426\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.09855951478392722,\n \"acc_stderr\": 0.008210320350946333\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7474348855564326,\n \"acc_stderr\": 0.012211148449394105\n }\n}\n```", "repo_url": "https://huggingface.co/YeungNLP/firefly-llama2-13b-chat", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_10_03T11_22_10.318112", "path": ["**/details_harness|arc:challenge|25_2023-10-03T11-22-10.318112.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-10-03T11-22-10.318112.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_10_23T07_10_32.244484", "path": ["**/details_harness|drop|3_2023-10-23T07-10-32.244484.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-10-23T07-10-32.244484.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_10_23T07_10_32.244484", "path": ["**/details_harness|gsm8k|5_2023-10-23T07-10-32.244484.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-10-23T07-10-32.244484.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_10_03T11_22_10.318112", "path": ["**/details_harness|hellaswag|10_2023-10-03T11-22-10.318112.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-10-03T11-22-10.318112.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_5", "data_files": [{"split": "2023_10_03T11_22_10.318112", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-03T11-22-10.318112.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-10-03T11-22-10.318112.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-10-03T11-22-10.318112.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-10-03T11-22-10.318112.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-03T11-22-10.318112.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-10-03T11-22-10.318112.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-10-03T11-22-10.318112.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-10-03T11-22-10.318112.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-10-03T11-22-10.318112.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-10-03T11-22-10.318112.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-10-03T11-22-10.318112.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-10-03T11-22-10.318112.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-03T11-22-10.318112.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-10-03T11-22-10.318112.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-03T11-22-10.318112.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-03T11-22-10.318112.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-10-03T11-22-10.318112.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-10-03T11-22-10.318112.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-10-03T11-22-10.318112.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-03T11-22-10.318112.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-03T11-22-10.318112.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-03T11-22-10.318112.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-10-03T11-22-10.318112.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-03T11-22-10.318112.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-03T11-22-10.318112.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-03T11-22-10.318112.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-03T11-22-10.318112.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-10-03T11-22-10.318112.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-03T11-22-10.318112.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-03T11-22-10.318112.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-03T11-22-10.318112.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-03T11-22-10.318112.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-10-03T11-22-10.318112.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-10-03T11-22-10.318112.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-10-03T11-22-10.318112.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-10-03T11-22-10.318112.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-03T11-22-10.318112.parquet", 
"**/details_harness|hendrycksTest-machine_learning|5_2023-10-03T11-22-10.318112.parquet", "**/details_harness|hendrycksTest-management|5_2023-10-03T11-22-10.318112.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-10-03T11-22-10.318112.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-10-03T11-22-10.318112.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-10-03T11-22-10.318112.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-10-03T11-22-10.318112.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-03T11-22-10.318112.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-10-03T11-22-10.318112.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-10-03T11-22-10.318112.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-10-03T11-22-10.318112.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-10-03T11-22-10.318112.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-10-03T11-22-10.318112.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-10-03T11-22-10.318112.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-10-03T11-22-10.318112.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-10-03T11-22-10.318112.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-10-03T11-22-10.318112.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-10-03T11-22-10.318112.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-03T11-22-10.318112.parquet", "**/details_harness|hendrycksTest-virology|5_2023-10-03T11-22-10.318112.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-10-03T11-22-10.318112.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-03T11-22-10.318112.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-10-03T11-22-10.318112.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-10-03T11-22-10.318112.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-10-03T11-22-10.318112.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-03T11-22-10.318112.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-10-03T11-22-10.318112.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-10-03T11-22-10.318112.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-10-03T11-22-10.318112.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-10-03T11-22-10.318112.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-10-03T11-22-10.318112.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-10-03T11-22-10.318112.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-10-03T11-22-10.318112.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-03T11-22-10.318112.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-10-03T11-22-10.318112.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-03T11-22-10.318112.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-03T11-22-10.318112.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-10-03T11-22-10.318112.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-10-03T11-22-10.318112.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-10-03T11-22-10.318112.parquet", 
"**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-03T11-22-10.318112.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-03T11-22-10.318112.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-03T11-22-10.318112.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-10-03T11-22-10.318112.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-03T11-22-10.318112.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-03T11-22-10.318112.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-03T11-22-10.318112.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-03T11-22-10.318112.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-10-03T11-22-10.318112.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-03T11-22-10.318112.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-03T11-22-10.318112.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-03T11-22-10.318112.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-03T11-22-10.318112.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-10-03T11-22-10.318112.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-10-03T11-22-10.318112.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-10-03T11-22-10.318112.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-10-03T11-22-10.318112.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-03T11-22-10.318112.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-10-03T11-22-10.318112.parquet", "**/details_harness|hendrycksTest-management|5_2023-10-03T11-22-10.318112.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-10-03T11-22-10.318112.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-10-03T11-22-10.318112.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-10-03T11-22-10.318112.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-10-03T11-22-10.318112.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-03T11-22-10.318112.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-10-03T11-22-10.318112.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-10-03T11-22-10.318112.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-10-03T11-22-10.318112.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-10-03T11-22-10.318112.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-10-03T11-22-10.318112.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-10-03T11-22-10.318112.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-10-03T11-22-10.318112.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-10-03T11-22-10.318112.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-10-03T11-22-10.318112.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-10-03T11-22-10.318112.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-03T11-22-10.318112.parquet", "**/details_harness|hendrycksTest-virology|5_2023-10-03T11-22-10.318112.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-10-03T11-22-10.318112.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", 
"data_files": [{"split": "2023_10_03T11_22_10.318112", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-03T11-22-10.318112.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-03T11-22-10.318112.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_10_03T11_22_10.318112", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-10-03T11-22-10.318112.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-10-03T11-22-10.318112.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_10_03T11_22_10.318112", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-10-03T11-22-10.318112.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-10-03T11-22-10.318112.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_10_03T11_22_10.318112", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-10-03T11-22-10.318112.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-10-03T11-22-10.318112.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_10_03T11_22_10.318112", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-03T11-22-10.318112.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-03T11-22-10.318112.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_10_03T11_22_10.318112", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-10-03T11-22-10.318112.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-10-03T11-22-10.318112.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_10_03T11_22_10.318112", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-10-03T11-22-10.318112.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-10-03T11-22-10.318112.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_10_03T11_22_10.318112", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-10-03T11-22-10.318112.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-10-03T11-22-10.318112.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_10_03T11_22_10.318112", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-10-03T11-22-10.318112.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-10-03T11-22-10.318112.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_10_03T11_22_10.318112", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-10-03T11-22-10.318112.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-10-03T11-22-10.318112.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_10_03T11_22_10.318112", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-10-03T11-22-10.318112.parquet"]}, {"split": "latest", 
"path": ["**/details_harness|hendrycksTest-college_physics|5_2023-10-03T11-22-10.318112.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_10_03T11_22_10.318112", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-10-03T11-22-10.318112.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-10-03T11-22-10.318112.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_10_03T11_22_10.318112", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-03T11-22-10.318112.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-03T11-22-10.318112.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_10_03T11_22_10.318112", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-10-03T11-22-10.318112.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-10-03T11-22-10.318112.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_10_03T11_22_10.318112", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-03T11-22-10.318112.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-03T11-22-10.318112.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_10_03T11_22_10.318112", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-03T11-22-10.318112.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-03T11-22-10.318112.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_10_03T11_22_10.318112", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-10-03T11-22-10.318112.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-10-03T11-22-10.318112.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_10_03T11_22_10.318112", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-10-03T11-22-10.318112.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-10-03T11-22-10.318112.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_10_03T11_22_10.318112", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-10-03T11-22-10.318112.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-10-03T11-22-10.318112.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_10_03T11_22_10.318112", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-03T11-22-10.318112.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-03T11-22-10.318112.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_10_03T11_22_10.318112", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-03T11-22-10.318112.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-03T11-22-10.318112.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_10_03T11_22_10.318112", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-03T11-22-10.318112.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-03T11-22-10.318112.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_10_03T11_22_10.318112", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-10-03T11-22-10.318112.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-10-03T11-22-10.318112.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_10_03T11_22_10.318112", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-03T11-22-10.318112.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-03T11-22-10.318112.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_10_03T11_22_10.318112", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-03T11-22-10.318112.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-03T11-22-10.318112.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_10_03T11_22_10.318112", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-03T11-22-10.318112.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-03T11-22-10.318112.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_10_03T11_22_10.318112", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-03T11-22-10.318112.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-03T11-22-10.318112.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_10_03T11_22_10.318112", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-10-03T11-22-10.318112.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-10-03T11-22-10.318112.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_10_03T11_22_10.318112", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-03T11-22-10.318112.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-03T11-22-10.318112.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_10_03T11_22_10.318112", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-03T11-22-10.318112.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-03T11-22-10.318112.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_10_03T11_22_10.318112", "path": 
["**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-03T11-22-10.318112.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-03T11-22-10.318112.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_10_03T11_22_10.318112", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-03T11-22-10.318112.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-03T11-22-10.318112.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_10_03T11_22_10.318112", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-10-03T11-22-10.318112.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-10-03T11-22-10.318112.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_10_03T11_22_10.318112", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-10-03T11-22-10.318112.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-10-03T11-22-10.318112.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_10_03T11_22_10.318112", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-10-03T11-22-10.318112.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-10-03T11-22-10.318112.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_10_03T11_22_10.318112", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-10-03T11-22-10.318112.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-10-03T11-22-10.318112.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_10_03T11_22_10.318112", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-03T11-22-10.318112.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-03T11-22-10.318112.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_10_03T11_22_10.318112", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-10-03T11-22-10.318112.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-10-03T11-22-10.318112.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_10_03T11_22_10.318112", "path": ["**/details_harness|hendrycksTest-management|5_2023-10-03T11-22-10.318112.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-10-03T11-22-10.318112.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_10_03T11_22_10.318112", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-10-03T11-22-10.318112.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-10-03T11-22-10.318112.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_10_03T11_22_10.318112", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-10-03T11-22-10.318112.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-medical_genetics|5_2023-10-03T11-22-10.318112.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_10_03T11_22_10.318112", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-10-03T11-22-10.318112.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-10-03T11-22-10.318112.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_10_03T11_22_10.318112", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-10-03T11-22-10.318112.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-10-03T11-22-10.318112.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_10_03T11_22_10.318112", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-03T11-22-10.318112.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-03T11-22-10.318112.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_10_03T11_22_10.318112", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-10-03T11-22-10.318112.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-10-03T11-22-10.318112.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_10_03T11_22_10.318112", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-10-03T11-22-10.318112.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-10-03T11-22-10.318112.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_10_03T11_22_10.318112", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-10-03T11-22-10.318112.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-10-03T11-22-10.318112.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_10_03T11_22_10.318112", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-10-03T11-22-10.318112.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-10-03T11-22-10.318112.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_10_03T11_22_10.318112", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-10-03T11-22-10.318112.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-10-03T11-22-10.318112.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_10_03T11_22_10.318112", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-10-03T11-22-10.318112.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-10-03T11-22-10.318112.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_10_03T11_22_10.318112", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-10-03T11-22-10.318112.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-10-03T11-22-10.318112.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": 
"2023_10_03T11_22_10.318112", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-10-03T11-22-10.318112.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-10-03T11-22-10.318112.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_10_03T11_22_10.318112", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-10-03T11-22-10.318112.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-10-03T11-22-10.318112.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_10_03T11_22_10.318112", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-10-03T11-22-10.318112.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-10-03T11-22-10.318112.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_10_03T11_22_10.318112", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-03T11-22-10.318112.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-03T11-22-10.318112.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_10_03T11_22_10.318112", "path": ["**/details_harness|hendrycksTest-virology|5_2023-10-03T11-22-10.318112.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-10-03T11-22-10.318112.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_10_03T11_22_10.318112", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-10-03T11-22-10.318112.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-10-03T11-22-10.318112.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_10_03T11_22_10.318112", "path": ["**/details_harness|truthfulqa:mc|0_2023-10-03T11-22-10.318112.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-10-03T11-22-10.318112.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_10_23T07_10_32.244484", "path": ["**/details_harness|winogrande|5_2023-10-23T07-10-32.244484.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-10-23T07-10-32.244484.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_10_03T11_22_10.318112", "path": ["results_2023-10-03T11-22-10.318112.parquet"]}, {"split": "2023_10_23T07_10_32.244484", "path": ["results_2023-10-23T07-10-32.244484.parquet"]}, {"split": "latest", "path": ["results_2023-10-23T07-10-32.244484.parquet"]}]}]}
2023-10-23T06:10:44+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of YeungNLP/firefly-llama2-13b-chat ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model YeungNLP/firefly-llama2-13b-chat on the Open LLM Leaderboard. The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following (the snippet is reproduced just after this card): ## Latest results These are the latest results from run 2023-10-23T07:10:32.244484 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
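The loading snippet referred to above was dropped from this plain-text rendering; it is reproduced here verbatim from the full card text:

```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_YeungNLP__firefly-llama2-13b-chat",
	"harness_winogrande_5",
	split="train")
```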
[ "# Dataset Card for Evaluation run of YeungNLP/firefly-llama2-13b-chat", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model YeungNLP/firefly-llama2-13b-chat on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-23T07:10:32.244484(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of YeungNLP/firefly-llama2-13b-chat", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model YeungNLP/firefly-llama2-13b-chat on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-23T07:10:32.244484(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 24, 31, 172, 67, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of YeungNLP/firefly-llama2-13b-chat## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model YeungNLP/firefly-llama2-13b-chat on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-10-23T07:10:32.244484(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
1ce188482628733fa16a44af47c78ba817a7247b
# Dataset Card for "mental_health_chatbot_dataset" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
umitmertcakmak/mental_health_chatbot_dataset
[ "region:us" ]
2023-10-03T10:34:30+00:00
{"dataset_info": {"features": [{"name": "text", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 189421, "num_examples": 172}], "download_size": 102272, "dataset_size": 189421}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]}
2023-10-03T10:34:32+00:00
[]
[]
TAGS #region-us
# Dataset Card for "mental_health_chatbot_dataset" More Information needed
[ "# Dataset Card for \"mental_health_chatbot_dataset\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"mental_health_chatbot_dataset\"\n\nMore Information needed" ]
[ 6, 19 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"mental_health_chatbot_dataset\"\n\nMore Information needed" ]
51272206da36097bb3d4276a400ab63f19171bb6
# Dataset Card for Evaluation run of PocketDoc/Dans-MysteryModel-13b ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/PocketDoc/Dans-MysteryModel-13b - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [PocketDoc/Dans-MysteryModel-13b](https://huggingface.co/PocketDoc/Dans-MysteryModel-13b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_PocketDoc__Dans-MysteryModel-13b", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-10-24T21:16:01.308166](https://huggingface.co/datasets/open-llm-leaderboard/details_PocketDoc__Dans-MysteryModel-13b/blob/main/results_2023-10-24T21-16-01.308166.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "em": 0.38066275167785235, "em_stderr": 0.004972484553923351, "f1": 0.43489723154362553, "f1_stderr": 0.004826488404910045, "acc": 0.3741120757695343, "acc_stderr": 0.006099244550129889 }, "harness|drop|3": { "em": 0.38066275167785235, "em_stderr": 0.004972484553923351, "f1": 0.43489723154362553, "f1_stderr": 0.004826488404910045 }, "harness|gsm8k|5": { "acc": 0.0, "acc_stderr": 0.0 }, "harness|winogrande|5": { "acc": 0.7482241515390686, "acc_stderr": 0.012198489100259778 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
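A small exploration sketch (not part of the original card; `get_dataset_config_names` is a standard helper in the `datasets` library, and the "results" configuration with its "latest" split is described in the card above):

```python
from datasets import get_dataset_config_names, load_dataset

repo = "open-llm-leaderboard/details_PocketDoc__Dans-MysteryModel-13b"

# Enumerate the per-task configurations (e.g. "harness_winogrande_5", "results").
configs = get_dataset_config_names(repo)
print(len(configs), configs[:5])

# The "results" config aggregates metrics; "latest" points to the newest run.
results = load_dataset(repo, "results", split="latest")
print(results[0])
```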
open-llm-leaderboard/details_PocketDoc__Dans-MysteryModel-13b
[ "region:us" ]
2023-10-03T10:39:47+00:00
{"pretty_name": "Evaluation run of PocketDoc/Dans-MysteryModel-13b", "dataset_summary": "Dataset automatically created during the evaluation run of model [PocketDoc/Dans-MysteryModel-13b](https://huggingface.co/PocketDoc/Dans-MysteryModel-13b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_PocketDoc__Dans-MysteryModel-13b\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-10-24T21:16:01.308166](https://huggingface.co/datasets/open-llm-leaderboard/details_PocketDoc__Dans-MysteryModel-13b/blob/main/results_2023-10-24T21-16-01.308166.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.38066275167785235,\n \"em_stderr\": 0.004972484553923351,\n \"f1\": 0.43489723154362553,\n \"f1_stderr\": 0.004826488404910045,\n \"acc\": 0.3741120757695343,\n \"acc_stderr\": 0.006099244550129889\n },\n \"harness|drop|3\": {\n \"em\": 0.38066275167785235,\n \"em_stderr\": 0.004972484553923351,\n \"f1\": 0.43489723154362553,\n \"f1_stderr\": 0.004826488404910045\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.0,\n \"acc_stderr\": 0.0\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7482241515390686,\n \"acc_stderr\": 0.012198489100259778\n }\n}\n```", "repo_url": "https://huggingface.co/PocketDoc/Dans-MysteryModel-13b", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_10_03T11_39_23.450846", "path": ["**/details_harness|arc:challenge|25_2023-10-03T11-39-23.450846.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-10-03T11-39-23.450846.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_10_24T21_16_01.308166", "path": ["**/details_harness|drop|3_2023-10-24T21-16-01.308166.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-10-24T21-16-01.308166.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_10_24T21_16_01.308166", "path": ["**/details_harness|gsm8k|5_2023-10-24T21-16-01.308166.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-10-24T21-16-01.308166.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_10_03T11_39_23.450846", "path": ["**/details_harness|hellaswag|10_2023-10-03T11-39-23.450846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-10-03T11-39-23.450846.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": 
[{"split": "2023_10_03T11_39_23.450846", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-03T11-39-23.450846.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-10-03T11-39-23.450846.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-10-03T11-39-23.450846.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-10-03T11-39-23.450846.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-03T11-39-23.450846.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-10-03T11-39-23.450846.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-10-03T11-39-23.450846.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-10-03T11-39-23.450846.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-10-03T11-39-23.450846.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-10-03T11-39-23.450846.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-10-03T11-39-23.450846.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-10-03T11-39-23.450846.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-03T11-39-23.450846.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-10-03T11-39-23.450846.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-03T11-39-23.450846.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-03T11-39-23.450846.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-10-03T11-39-23.450846.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-10-03T11-39-23.450846.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-10-03T11-39-23.450846.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-03T11-39-23.450846.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-03T11-39-23.450846.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-03T11-39-23.450846.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-10-03T11-39-23.450846.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-03T11-39-23.450846.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-03T11-39-23.450846.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-03T11-39-23.450846.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-03T11-39-23.450846.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-10-03T11-39-23.450846.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-03T11-39-23.450846.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-03T11-39-23.450846.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-03T11-39-23.450846.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-03T11-39-23.450846.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-10-03T11-39-23.450846.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-10-03T11-39-23.450846.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-10-03T11-39-23.450846.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-10-03T11-39-23.450846.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-03T11-39-23.450846.parquet", 
"**/details_harness|hendrycksTest-machine_learning|5_2023-10-03T11-39-23.450846.parquet", "**/details_harness|hendrycksTest-management|5_2023-10-03T11-39-23.450846.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-10-03T11-39-23.450846.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-10-03T11-39-23.450846.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-10-03T11-39-23.450846.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-10-03T11-39-23.450846.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-03T11-39-23.450846.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-10-03T11-39-23.450846.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-10-03T11-39-23.450846.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-10-03T11-39-23.450846.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-10-03T11-39-23.450846.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-10-03T11-39-23.450846.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-10-03T11-39-23.450846.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-10-03T11-39-23.450846.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-10-03T11-39-23.450846.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-10-03T11-39-23.450846.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-10-03T11-39-23.450846.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-03T11-39-23.450846.parquet", "**/details_harness|hendrycksTest-virology|5_2023-10-03T11-39-23.450846.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-10-03T11-39-23.450846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-03T11-39-23.450846.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-10-03T11-39-23.450846.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-10-03T11-39-23.450846.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-10-03T11-39-23.450846.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-03T11-39-23.450846.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-10-03T11-39-23.450846.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-10-03T11-39-23.450846.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-10-03T11-39-23.450846.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-10-03T11-39-23.450846.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-10-03T11-39-23.450846.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-10-03T11-39-23.450846.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-10-03T11-39-23.450846.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-03T11-39-23.450846.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-10-03T11-39-23.450846.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-03T11-39-23.450846.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-03T11-39-23.450846.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-10-03T11-39-23.450846.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-10-03T11-39-23.450846.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-10-03T11-39-23.450846.parquet", 
"**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-03T11-39-23.450846.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-03T11-39-23.450846.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-03T11-39-23.450846.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-10-03T11-39-23.450846.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-03T11-39-23.450846.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-03T11-39-23.450846.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-03T11-39-23.450846.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-03T11-39-23.450846.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-10-03T11-39-23.450846.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-03T11-39-23.450846.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-03T11-39-23.450846.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-03T11-39-23.450846.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-03T11-39-23.450846.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-10-03T11-39-23.450846.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-10-03T11-39-23.450846.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-10-03T11-39-23.450846.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-10-03T11-39-23.450846.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-03T11-39-23.450846.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-10-03T11-39-23.450846.parquet", "**/details_harness|hendrycksTest-management|5_2023-10-03T11-39-23.450846.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-10-03T11-39-23.450846.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-10-03T11-39-23.450846.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-10-03T11-39-23.450846.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-10-03T11-39-23.450846.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-03T11-39-23.450846.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-10-03T11-39-23.450846.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-10-03T11-39-23.450846.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-10-03T11-39-23.450846.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-10-03T11-39-23.450846.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-10-03T11-39-23.450846.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-10-03T11-39-23.450846.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-10-03T11-39-23.450846.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-10-03T11-39-23.450846.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-10-03T11-39-23.450846.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-10-03T11-39-23.450846.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-03T11-39-23.450846.parquet", "**/details_harness|hendrycksTest-virology|5_2023-10-03T11-39-23.450846.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-10-03T11-39-23.450846.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", 
"data_files": [{"split": "2023_10_03T11_39_23.450846", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-03T11-39-23.450846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-03T11-39-23.450846.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_10_03T11_39_23.450846", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-10-03T11-39-23.450846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-10-03T11-39-23.450846.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_10_03T11_39_23.450846", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-10-03T11-39-23.450846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-10-03T11-39-23.450846.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_10_03T11_39_23.450846", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-10-03T11-39-23.450846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-10-03T11-39-23.450846.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_10_03T11_39_23.450846", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-03T11-39-23.450846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-03T11-39-23.450846.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_10_03T11_39_23.450846", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-10-03T11-39-23.450846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-10-03T11-39-23.450846.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_10_03T11_39_23.450846", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-10-03T11-39-23.450846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-10-03T11-39-23.450846.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_10_03T11_39_23.450846", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-10-03T11-39-23.450846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-10-03T11-39-23.450846.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_10_03T11_39_23.450846", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-10-03T11-39-23.450846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-10-03T11-39-23.450846.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_10_03T11_39_23.450846", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-10-03T11-39-23.450846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-10-03T11-39-23.450846.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_10_03T11_39_23.450846", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-10-03T11-39-23.450846.parquet"]}, {"split": "latest", 
"path": ["**/details_harness|hendrycksTest-college_physics|5_2023-10-03T11-39-23.450846.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_10_03T11_39_23.450846", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-10-03T11-39-23.450846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-10-03T11-39-23.450846.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_10_03T11_39_23.450846", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-03T11-39-23.450846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-03T11-39-23.450846.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_10_03T11_39_23.450846", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-10-03T11-39-23.450846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-10-03T11-39-23.450846.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_10_03T11_39_23.450846", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-03T11-39-23.450846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-03T11-39-23.450846.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_10_03T11_39_23.450846", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-03T11-39-23.450846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-03T11-39-23.450846.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_10_03T11_39_23.450846", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-10-03T11-39-23.450846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-10-03T11-39-23.450846.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_10_03T11_39_23.450846", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-10-03T11-39-23.450846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-10-03T11-39-23.450846.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_10_03T11_39_23.450846", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-10-03T11-39-23.450846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-10-03T11-39-23.450846.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_10_03T11_39_23.450846", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-03T11-39-23.450846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-03T11-39-23.450846.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_10_03T11_39_23.450846", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-03T11-39-23.450846.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-03T11-39-23.450846.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_10_03T11_39_23.450846", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-03T11-39-23.450846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-03T11-39-23.450846.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_10_03T11_39_23.450846", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-10-03T11-39-23.450846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-10-03T11-39-23.450846.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_10_03T11_39_23.450846", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-03T11-39-23.450846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-03T11-39-23.450846.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_10_03T11_39_23.450846", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-03T11-39-23.450846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-03T11-39-23.450846.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_10_03T11_39_23.450846", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-03T11-39-23.450846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-03T11-39-23.450846.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_10_03T11_39_23.450846", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-03T11-39-23.450846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-03T11-39-23.450846.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_10_03T11_39_23.450846", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-10-03T11-39-23.450846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-10-03T11-39-23.450846.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_10_03T11_39_23.450846", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-03T11-39-23.450846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-03T11-39-23.450846.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_10_03T11_39_23.450846", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-03T11-39-23.450846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-03T11-39-23.450846.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_10_03T11_39_23.450846", "path": 
["**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-03T11-39-23.450846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-03T11-39-23.450846.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_10_03T11_39_23.450846", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-03T11-39-23.450846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-03T11-39-23.450846.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_10_03T11_39_23.450846", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-10-03T11-39-23.450846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-10-03T11-39-23.450846.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_10_03T11_39_23.450846", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-10-03T11-39-23.450846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-10-03T11-39-23.450846.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_10_03T11_39_23.450846", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-10-03T11-39-23.450846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-10-03T11-39-23.450846.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_10_03T11_39_23.450846", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-10-03T11-39-23.450846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-10-03T11-39-23.450846.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_10_03T11_39_23.450846", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-03T11-39-23.450846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-03T11-39-23.450846.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_10_03T11_39_23.450846", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-10-03T11-39-23.450846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-10-03T11-39-23.450846.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_10_03T11_39_23.450846", "path": ["**/details_harness|hendrycksTest-management|5_2023-10-03T11-39-23.450846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-10-03T11-39-23.450846.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_10_03T11_39_23.450846", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-10-03T11-39-23.450846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-10-03T11-39-23.450846.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_10_03T11_39_23.450846", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-10-03T11-39-23.450846.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-medical_genetics|5_2023-10-03T11-39-23.450846.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_10_03T11_39_23.450846", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-10-03T11-39-23.450846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-10-03T11-39-23.450846.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_10_03T11_39_23.450846", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-10-03T11-39-23.450846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-10-03T11-39-23.450846.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_10_03T11_39_23.450846", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-03T11-39-23.450846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-03T11-39-23.450846.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_10_03T11_39_23.450846", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-10-03T11-39-23.450846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-10-03T11-39-23.450846.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_10_03T11_39_23.450846", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-10-03T11-39-23.450846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-10-03T11-39-23.450846.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_10_03T11_39_23.450846", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-10-03T11-39-23.450846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-10-03T11-39-23.450846.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_10_03T11_39_23.450846", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-10-03T11-39-23.450846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-10-03T11-39-23.450846.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_10_03T11_39_23.450846", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-10-03T11-39-23.450846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-10-03T11-39-23.450846.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_10_03T11_39_23.450846", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-10-03T11-39-23.450846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-10-03T11-39-23.450846.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_10_03T11_39_23.450846", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-10-03T11-39-23.450846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-10-03T11-39-23.450846.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": 
"2023_10_03T11_39_23.450846", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-10-03T11-39-23.450846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-10-03T11-39-23.450846.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_10_03T11_39_23.450846", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-10-03T11-39-23.450846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-10-03T11-39-23.450846.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_10_03T11_39_23.450846", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-10-03T11-39-23.450846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-10-03T11-39-23.450846.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_10_03T11_39_23.450846", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-03T11-39-23.450846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-03T11-39-23.450846.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_10_03T11_39_23.450846", "path": ["**/details_harness|hendrycksTest-virology|5_2023-10-03T11-39-23.450846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-10-03T11-39-23.450846.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_10_03T11_39_23.450846", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-10-03T11-39-23.450846.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-10-03T11-39-23.450846.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_10_03T11_39_23.450846", "path": ["**/details_harness|truthfulqa:mc|0_2023-10-03T11-39-23.450846.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-10-03T11-39-23.450846.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_10_24T21_16_01.308166", "path": ["**/details_harness|winogrande|5_2023-10-24T21-16-01.308166.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-10-24T21-16-01.308166.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_10_03T11_39_23.450846", "path": ["results_2023-10-03T11-39-23.450846.parquet"]}, {"split": "2023_10_24T21_16_01.308166", "path": ["results_2023-10-24T21-16-01.308166.parquet"]}, {"split": "latest", "path": ["results_2023-10-24T21-16-01.308166.parquet"]}]}]}
2023-10-24T20:16:14+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of PocketDoc/Dans-MysteryModel-13b ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model PocketDoc/Dans-MysteryModel-13b on the Open LLM Leaderboard. The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-10-24T21:16:01.308166 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each of them in the results and the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
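The processed text above drops the original loading snippet. A minimal sketch of that step, assuming the repo follows the leaderboard's `details_<org>__<model>` naming convention (the exact repo id for this record is an inference) and using the `harness_winogrande_5` configuration with its `latest` split, both declared in this record's metadata:

```python
from datasets import load_dataset

# Repo id inferred from the naming convention used by the other
# leaderboard records in this dump; treat it as an assumption.
data = load_dataset(
    "open-llm-leaderboard/details_PocketDoc__Dans-MysteryModel-13b",
    "harness_winogrande_5",
    split="latest",  # the "latest" split mirrors the newest run
)
```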
[ "# Dataset Card for Evaluation run of PocketDoc/Dans-MysteryModel-13b", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model PocketDoc/Dans-MysteryModel-13b on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-24T21:16:01.308166(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of PocketDoc/Dans-MysteryModel-13b", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model PocketDoc/Dans-MysteryModel-13b on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-24T21:16:01.308166(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 22, 31, 170, 67, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of PocketDoc/Dans-MysteryModel-13b## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model PocketDoc/Dans-MysteryModel-13b on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-10-24T21:16:01.308166(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
e20f16245dbc56989a968adaaa49af0cc6cd197a
# Dataset Card for Evaluation run of migtissera/Synthia-13B-v1.2

## Dataset Description

- **Homepage:**
- **Repository:** https://huggingface.co/migtissera/Synthia-13B-v1.2
- **Paper:**
- **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
- **Point of Contact:** [email protected]

### Dataset Summary

Dataset automatically created during the evaluation run of model [migtissera/Synthia-13B-v1.2](https://huggingface.co/migtissera/Synthia-13B-v1.2) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).

The dataset is composed of 3 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).

To load the details from a run, you can for instance do the following:

```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_migtissera__Synthia-13B-v1.2_public",
	"harness_winogrande_5",
	split="train")
```

## Latest results

These are the [latest results from run 2023-11-06T21:31:35.338838](https://huggingface.co/datasets/open-llm-leaderboard/details_migtissera__Synthia-13B-v1.2_public/blob/main/results_2023-11-06T21-31-35.338838.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each of them in the results and the "latest" split for each eval):

```python
{
    "all": {
        "em": 0.1863464765100671,
        "em_stderr": 0.003987677232655252,
        "f1": 0.2547860738255037,
        "f1_stderr": 0.004029636733616552,
        "acc": 0.4373652518320964,
        "acc_stderr": 0.010268101875758134
    },
    "harness|drop|3": {
        "em": 0.1863464765100671,
        "em_stderr": 0.003987677232655252,
        "f1": 0.2547860738255037,
        "f1_stderr": 0.004029636733616552
    },
    "harness|gsm8k|5": {
        "acc": 0.10993176648976498,
        "acc_stderr": 0.008616195587865394
    },
    "harness|winogrande|5": {
        "acc": 0.7647987371744278,
        "acc_stderr": 0.011920008163650877
    }
}
```

### Supported Tasks and Leaderboards

[More Information Needed]

### Languages

[More Information Needed]

## Dataset Structure

### Data Instances

[More Information Needed]

### Data Fields

[More Information Needed]

### Data Splits

[More Information Needed]

## Dataset Creation

### Curation Rationale

[More Information Needed]

### Source Data

#### Initial Data Collection and Normalization

[More Information Needed]

#### Who are the source language producers?

[More Information Needed]

### Annotations

#### Annotation process

[More Information Needed]

#### Who are the annotators?

[More Information Needed]

### Personal and Sensitive Information

[More Information Needed]

## Considerations for Using the Data

### Social Impact of Dataset

[More Information Needed]

### Discussion of Biases

[More Information Needed]

### Other Known Limitations

[More Information Needed]

## Additional Information

### Dataset Curators

[More Information Needed]

### Licensing Information

[More Information Needed]

### Citation Information

[More Information Needed]

### Contributions

[More Information Needed]
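The snippet in the card loads a single task configuration. A complementary sketch, assuming the same repo id and the "results" configuration with its "latest" split declared in this record's metadata, pulls the aggregated metrics shown under "Latest results"; the exact row layout of the results parquet is an assumption:

```python
from datasets import load_dataset

# The "results" config aggregates every run; the "latest" split
# points at the newest one. Printing the first row is just for
# inspection of the aggregated metrics.
results = load_dataset(
    "open-llm-leaderboard/details_migtissera__Synthia-13B-v1.2_public",
    "results",
    split="latest",
)
print(results[0])
```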
open-llm-leaderboard/details_migtissera__Synthia-13B-v1.2
[ "region:us" ]
2023-10-03T10:42:14+00:00
{"pretty_name": "Evaluation run of migtissera/Synthia-13B-v1.2", "dataset_summary": "Dataset automatically created during the evaluation run of model [migtissera/Synthia-13B-v1.2](https://huggingface.co/migtissera/Synthia-13B-v1.2) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 3 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_migtissera__Synthia-13B-v1.2_public\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-11-06T21:31:35.338838](https://huggingface.co/datasets/open-llm-leaderboard/details_migtissera__Synthia-13B-v1.2_public/blob/main/results_2023-11-06T21-31-35.338838.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.1863464765100671,\n \"em_stderr\": 0.003987677232655252,\n \"f1\": 0.2547860738255037,\n \"f1_stderr\": 0.004029636733616552,\n \"acc\": 0.4373652518320964,\n \"acc_stderr\": 0.010268101875758134\n },\n \"harness|drop|3\": {\n \"em\": 0.1863464765100671,\n \"em_stderr\": 0.003987677232655252,\n \"f1\": 0.2547860738255037,\n \"f1_stderr\": 0.004029636733616552\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.10993176648976498,\n \"acc_stderr\": 0.008616195587865394\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7647987371744278,\n \"acc_stderr\": 0.011920008163650877\n }\n}\n```", "repo_url": "https://huggingface.co/migtissera/Synthia-13B-v1.2", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_drop_3", "data_files": [{"split": "2023_11_04T18_02_40.204522", "path": ["**/details_harness|drop|3_2023-11-04T18-02-40.204522.parquet"]}, {"split": "2023_11_06T21_31_35.338838", "path": ["**/details_harness|drop|3_2023-11-06T21-31-35.338838.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-11-06T21-31-35.338838.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_11_04T18_02_40.204522", "path": ["**/details_harness|gsm8k|5_2023-11-04T18-02-40.204522.parquet"]}, {"split": "2023_11_06T21_31_35.338838", "path": ["**/details_harness|gsm8k|5_2023-11-06T21-31-35.338838.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-11-06T21-31-35.338838.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_11_04T18_02_40.204522", "path": ["**/details_harness|winogrande|5_2023-11-04T18-02-40.204522.parquet"]}, {"split": "2023_11_06T21_31_35.338838", "path": ["**/details_harness|winogrande|5_2023-11-06T21-31-35.338838.parquet"]}, {"split": "latest", "path": 
["**/details_harness|winogrande|5_2023-11-06T21-31-35.338838.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_11_04T18_02_40.204522", "path": ["results_2023-11-04T18-02-40.204522.parquet"]}, {"split": "2023_11_06T21_31_35.338838", "path": ["results_2023-11-06T21-31-35.338838.parquet"]}, {"split": "latest", "path": ["results_2023-11-06T21-31-35.338838.parquet"]}]}]}
2023-12-01T14:03:46+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of migtissera/Synthia-13B-v1.2 ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model migtissera/Synthia-13B-v1.2 on the Open LLM Leaderboard. The dataset is composed of 3 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-11-06T21:31:35.338838 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each of them in the results and the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
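For per-example inspection of, say, the GSM8K run summarized above, one possible approach uses the "harness_gsm8k_5" configuration and "latest" split declared in this record's metadata; the per-row column layout is not documented here, so treat the inspection step as exploratory:

```python
from datasets import load_dataset

gsm8k = load_dataset(
    "open-llm-leaderboard/details_migtissera__Synthia-13B-v1.2_public",
    "harness_gsm8k_5",
    split="latest",
)
df = gsm8k.to_pandas()  # one row per evaluated example
print(df.columns.tolist(), len(df))
```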
[ "# Dataset Card for Evaluation run of migtissera/Synthia-13B-v1.2", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model migtissera/Synthia-13B-v1.2 on the Open LLM Leaderboard.\n\nThe dataset is composed of 3 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-11-06T21:31:35.338838(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of migtissera/Synthia-13B-v1.2", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model migtissera/Synthia-13B-v1.2 on the Open LLM Leaderboard.\n\nThe dataset is composed of 3 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-11-06T21:31:35.338838(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 22, 31, 171, 66, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of migtissera/Synthia-13B-v1.2## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model migtissera/Synthia-13B-v1.2 on the Open LLM Leaderboard.\n\nThe dataset is composed of 3 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-11-06T21:31:35.338838(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
5749a4652fab98970df5965851292c280d5c218e
# Silicone Masks Biometric Attacks

The dataset consists of videos of individuals and attacks with printed 2D masks and silicone masks. Videos are filmed in different lighting conditions (*in a dark room, daylight, light room and nightlight*). The dataset includes videos of people with different attributes (*glasses, mask, hat, hood, wigs and mustaches for men*).

### Types of videos in the dataset:
- **real** - real video of the person
- **outline** - video of the person wearing a printed 2D mask
- **silicone** - video of the person wearing a silicone mask

![](https://www.googleapis.com/download/storage/v1/b/kaggle-user-content/o/inbox%2F12421376%2Ff9be1f70a38085709c85716b212cdd11%2FFrame%2027.png?generation=1696329340111093&alt=media)

## Full version of the dataset includes 5792 videos

### Types and number of videos in the full dataset:
- **2885** real videos of people
- **2859** videos of people wearing a silicone mask
- **48** videos of people wearing a 2D mask

### Gender of people in the dataset:
- women: **2685**
- men: **3107**

The dataset serves as a valuable resource for computer vision, anti-spoofing tasks, video analysis, and security systems. It allows for the development of algorithms and models that can effectively detect attacks.

Studying the dataset may lead to the development of improved *security systems, surveillance technologies, and solutions to mitigate the risks associated with masked individuals carrying out attacks*.

# Get the dataset

### This is just an example of the data

Leave a request on [**https://trainingdata.pro/data-market**](https://trainingdata.pro/data-market?utm_source=huggingface&utm_medium=cpc&utm_campaign=silicone-masks-biometric-attacks) to discuss your requirements, learn about the price and buy the dataset.

# Content
- **real** - contains real videos of people,
- **mask** - contains videos of people wearing a printed 2D mask,
- **silicone** - contains videos of people wearing a silicone mask,
- **dataset_info.csv** - includes the information about the videos in the dataset

### File with the extension .csv
- **video**: link to the video,
- **type**: type of the video

# Attacks might be collected in accordance with your requirements.

## [TrainingData](https://trainingdata.pro/data-market?utm_source=huggingface&utm_medium=cpc&utm_campaign=silicone-masks-biometric-attacks) provides high-quality data annotation tailored to your needs

More datasets in TrainingData's Kaggle account: **https://www.kaggle.com/trainingdatapro/datasets**

TrainingData's GitHub: **https://github.com/trainingdata-pro**
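Once dataset_info.csv is downloaded, the catalog can be summarized in a few lines; a minimal sketch, assuming a local file path and the two columns named in the card ("video" and "type"):

```python
import pandas as pd

# Columns per the card: "video" (link to the clip) and
# "type" (real / mask / silicone). The local path is an assumption.
info = pd.read_csv("dataset_info.csv")
print(info["type"].value_counts())  # number of clips per video type
```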
TrainingDataPro/silicone-masks-biometric-attacks
[ "task_categories:video-classification", "language:en", "license:cc-by-nc-nd-4.0", "code", "finance", "region:us" ]
2023-10-03T10:45:42+00:00
{"language": ["en"], "license": "cc-by-nc-nd-4.0", "task_categories": ["video-classification"], "tags": ["code", "finance"], "dataset_info": {"features": [{"name": "id", "dtype": "int32"}, {"name": "name", "dtype": "string"}, {"name": "video", "dtype": "string"}, {"name": "label", "dtype": {"class_label": {"names": {"0": "real", "1": "silicone", "2": "mask"}}}}], "splits": [{"name": "train", "num_bytes": 2394, "num_examples": 62}], "download_size": 156861504, "dataset_size": 2394}}
2023-10-12T06:35:07+00:00
[]
[ "en" ]
TAGS #task_categories-video-classification #language-English #license-cc-by-nc-nd-4.0 #code #finance #region-us
# Silicone Masks Biometric Attacks The dataset consists of videos of individuals and attacks with printed 2D masks and silicone masks. Videos are filmed in different lighting conditions (*in a dark room, daylight, light room and nightlight*). The dataset includes videos of people with different attributes (*glasses, mask, hat, hood, wigs and mustaches for men*). ### Types of videos in the dataset: - real - real video of the person - outline - video of the person wearing a printed 2D mask - silicone - video of the person wearing a silicone mask ![](URL ## Full version of the dataset includes 5792 videos ### Types and number of videos in the full dataset: - 2885 real videos of people - 2859 videos of people wearing a silicone mask - 48 videos of people wearing a 2D mask ### Gender of people in the dataset: - women: 2685 - men: 3107 The dataset serves as a valuable resource for computer vision, anti-spoofing tasks, video analysis, and security systems. It allows for the development of algorithms and models that can effectively detect attacks. Studying the dataset may lead to the development of improved *security systems, surveillance technologies, and solutions to mitigate the risks associated with masked individuals carrying out attacks*. # Get the dataset ### This is just an example of the data Leave a request on URL to discuss your requirements, learn about the price and buy the dataset. # Content - real - contains real videos of people, - mask - contains videos of people wearing a printed 2D mask, - silicone - contains videos of people wearing a silicone mask, - dataset_info.csv - includes the information about the videos in the dataset ### File with the extension .csv - video: link to the video, - type: type of the video # Attacks might be collected in accordance with your requirements. ## TrainingData provides high-quality data annotation tailored to your needs More datasets in TrainingData's Kaggle account: URL TrainingData's GitHub: URL
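The sample published on the Hub can also be loaded directly; a short sketch, assuming the repo id from this record and the features declared in its metadata (id, name, video, and a ClassLabel over real/silicone/mask); access may require a request to the provider:

```python
from datasets import load_dataset

ds = load_dataset("TrainingDataPro/silicone-masks-biometric-attacks", split="train")
print(ds.features["label"].names)  # ['real', 'silicone', 'mask'] per the metadata
print(ds[0]["video"])              # each row stores a link to the clip
```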
[ "# Silicone Masks Biometric Attacks\nThe dataset consists of videos of individuals and attacks with printed 2D masks and silicone masks . Videos are filmed in different lightning conditions (*in a dark room, daylight, light room and nightlight*). Dataset includes videos of people with different attributes (*glasses, mask, hat, hood, wigs and mustaches for men*).", "### Types of videos in the dataset:\n- real - real video of the person \n- outline -video of the person wearing a printed 2D mask\n- silicone - video of the person wearing a silicone mask\n\n![](URL", "## Full version of the dataset includes 5792 videos", "### Types and number of videos in the full dataset:\n- 2885 real videos of people\n- 2859 videos of people wearing silicone mask\n- 48 videos of people wearing a 2D mask.", "### Gender of people in the dataset:\n- women: 2685\n- men: 3107\n\nThe dataset serves as a valuable resource for computer vision, anti-spoofing tasks, video analysis, and security systems. It allows for the development of algorithms and models that can effectively detect attacks.\n\nStudying the dataset may lead to the development of improved *security systems, surveillance technologies, and solutions to mitigate the risks associated with masked individuals carrying out attacks*.", "# Get the dataset", "### This is just an example of the data\n\nLeave a request on URL to discuss your requirements, learn about the price and buy the dataset.", "# Content\n- real - contains of real videos of people,\n- mask - contains of videos with people wearing a printed 2D mask,\n- silicone - contains of videos with people wearing a silicone mask,\n- dataset_info.csvl - includes the information about videos in the dataset", "### File with the extension .csv\n- video: link to the video,\n- type: type of the video", "# Attacks might be collected in accordance with your requirements.", "## TrainingData provides high-quality data annotation tailored to your needs\n\nMore datasets in TrainingData's Kaggle account: URL\n\nTrainingData's GitHub: URL" ]
[ "TAGS\n#task_categories-video-classification #language-English #license-cc-by-nc-nd-4.0 #code #finance #region-us \n", "# Silicone Masks Biometric Attacks\nThe dataset consists of videos of individuals and attacks with printed 2D masks and silicone masks . Videos are filmed in different lightning conditions (*in a dark room, daylight, light room and nightlight*). Dataset includes videos of people with different attributes (*glasses, mask, hat, hood, wigs and mustaches for men*).", "### Types of videos in the dataset:\n- real - real video of the person \n- outline -video of the person wearing a printed 2D mask\n- silicone - video of the person wearing a silicone mask\n\n![](URL", "## Full version of the dataset includes 5792 videos", "### Types and number of videos in the full dataset:\n- 2885 real videos of people\n- 2859 videos of people wearing silicone mask\n- 48 videos of people wearing a 2D mask.", "### Gender of people in the dataset:\n- women: 2685\n- men: 3107\n\nThe dataset serves as a valuable resource for computer vision, anti-spoofing tasks, video analysis, and security systems. It allows for the development of algorithms and models that can effectively detect attacks.\n\nStudying the dataset may lead to the development of improved *security systems, surveillance technologies, and solutions to mitigate the risks associated with masked individuals carrying out attacks*.", "# Get the dataset", "### This is just an example of the data\n\nLeave a request on URL to discuss your requirements, learn about the price and buy the dataset.", "# Content\n- real - contains of real videos of people,\n- mask - contains of videos with people wearing a printed 2D mask,\n- silicone - contains of videos with people wearing a silicone mask,\n- dataset_info.csvl - includes the information about videos in the dataset", "### File with the extension .csv\n- video: link to the video,\n- type: type of the video", "# Attacks might be collected in accordance with your requirements.", "## TrainingData provides high-quality data annotation tailored to your needs\n\nMore datasets in TrainingData's Kaggle account: URL\n\nTrainingData's GitHub: URL" ]
[ 39, 90, 50, 11, 41, 110, 5, 30, 62, 25, 13, 39 ]
[ "passage: TAGS\n#task_categories-video-classification #language-English #license-cc-by-nc-nd-4.0 #code #finance #region-us \n# Silicone Masks Biometric Attacks\nThe dataset consists of videos of individuals and attacks with printed 2D masks and silicone masks . Videos are filmed in different lightning conditions (*in a dark room, daylight, light room and nightlight*). Dataset includes videos of people with different attributes (*glasses, mask, hat, hood, wigs and mustaches for men*).### Types of videos in the dataset:\n- real - real video of the person \n- outline -video of the person wearing a printed 2D mask\n- silicone - video of the person wearing a silicone mask\n\n![](URL## Full version of the dataset includes 5792 videos### Types and number of videos in the full dataset:\n- 2885 real videos of people\n- 2859 videos of people wearing silicone mask\n- 48 videos of people wearing a 2D mask.### Gender of people in the dataset:\n- women: 2685\n- men: 3107\n\nThe dataset serves as a valuable resource for computer vision, anti-spoofing tasks, video analysis, and security systems. It allows for the development of algorithms and models that can effectively detect attacks.\n\nStudying the dataset may lead to the development of improved *security systems, surveillance technologies, and solutions to mitigate the risks associated with masked individuals carrying out attacks*.# Get the dataset### This is just an example of the data\n\nLeave a request on URL to discuss your requirements, learn about the price and buy the dataset.# Content\n- real - contains of real videos of people,\n- mask - contains of videos with people wearing a printed 2D mask,\n- silicone - contains of videos with people wearing a silicone mask,\n- dataset_info.csvl - includes the information about videos in the dataset### File with the extension .csv\n- video: link to the video,\n- type: type of the video# Attacks might be collected in accordance with your requirements." ]
1b467c625195c7664723c2a24b32736106c9173b
# Dataset Card for Evaluation run of boomerchan/magpie-13b

## Dataset Description

- **Homepage:** 
- **Repository:** https://huggingface.co/boomerchan/magpie-13b
- **Paper:** 
- **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
- **Point of Contact:** [email protected]

### Dataset Summary

Dataset automatically created during the evaluation run of model [boomerchan/magpie-13b](https://huggingface.co/boomerchan/magpie-13b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).

The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).

To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_boomerchan__magpie-13b",
	"harness_winogrande_5",
	split="train")
```

## Latest results

These are the [latest results from run 2023-10-27T04:34:42.967550](https://huggingface.co/datasets/open-llm-leaderboard/details_boomerchan__magpie-13b/blob/main/results_2023-10-27T04-34-42.967550.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval):

```python
{
    "all": {
        "em": 0.14272231543624161,
        "em_stderr": 0.003582171317651424,
        "f1": 0.20778418624161069,
        "f1_stderr": 0.0036307604368272656,
        "acc": 0.4548027044477143,
        "acc_stderr": 0.01080662148135179
    },
    "harness|drop|3": {
        "em": 0.14272231543624161,
        "em_stderr": 0.003582171317651424,
        "f1": 0.20778418624161069,
        "f1_stderr": 0.0036307604368272656
    },
    "harness|gsm8k|5": {
        "acc": 0.14480667172100076,
        "acc_stderr": 0.009693234799052706
    },
    "harness|winogrande|5": {
        "acc": 0.7647987371744278,
        "acc_stderr": 0.011920008163650877
    }
}
```

### Supported Tasks and Leaderboards

[More Information Needed]

### Languages

[More Information Needed]

## Dataset Structure

### Data Instances

[More Information Needed]

### Data Fields

[More Information Needed]

### Data Splits

[More Information Needed]

## Dataset Creation

### Curation Rationale

[More Information Needed]

### Source Data

#### Initial Data Collection and Normalization

[More Information Needed]

#### Who are the source language producers?

[More Information Needed]

### Annotations

#### Annotation process

[More Information Needed]

#### Who are the annotators?

[More Information Needed]

### Personal and Sensitive Information

[More Information Needed]

## Considerations for Using the Data

### Social Impact of Dataset

[More Information Needed]

### Discussion of Biases

[More Information Needed]

### Other Known Limitations

[More Information Needed]

## Additional Information

### Dataset Curators

[More Information Needed]

### Licensing Information

[More Information Needed]

### Citation Information

[More Information Needed]

### Contributions

[More Information Needed]
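As a quick sanity check on how the `"all"` block relates to the per-task numbers: the aggregate `acc` appears to be the unweighted mean of the GSM8K and Winogrande accuracies, while `em`/`f1` are carried over from DROP, the only task reporting them. A minimal check in plain Python, using only the values reported above:

```python
# Values copied from the "Latest results" block above.
gsm8k_acc = 0.14480667172100076       # harness|gsm8k|5
winogrande_acc = 0.7647987371744278   # harness|winogrande|5

# The unweighted mean reproduces the aggregate accuracy exactly.
print((gsm8k_acc + winogrande_acc) / 2)
# -> 0.4548027044477143, matching the "all" acc above
```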
open-llm-leaderboard/details_boomerchan__magpie-13b
[ "region:us" ]
2023-10-03T10:49:13+00:00
{"pretty_name": "Evaluation run of boomerchan/magpie-13b", "dataset_summary": "Dataset automatically created during the evaluation run of model [boomerchan/magpie-13b](https://huggingface.co/boomerchan/magpie-13b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_boomerchan__magpie-13b\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-10-27T04:34:42.967550](https://huggingface.co/datasets/open-llm-leaderboard/details_boomerchan__magpie-13b/blob/main/results_2023-10-27T04-34-42.967550.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.14272231543624161,\n \"em_stderr\": 0.003582171317651424,\n \"f1\": 0.20778418624161069,\n \"f1_stderr\": 0.0036307604368272656,\n \"acc\": 0.4548027044477143,\n \"acc_stderr\": 0.01080662148135179\n },\n \"harness|drop|3\": {\n \"em\": 0.14272231543624161,\n \"em_stderr\": 0.003582171317651424,\n \"f1\": 0.20778418624161069,\n \"f1_stderr\": 0.0036307604368272656\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.14480667172100076,\n \"acc_stderr\": 0.009693234799052706\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7647987371744278,\n \"acc_stderr\": 0.011920008163650877\n }\n}\n```", "repo_url": "https://huggingface.co/boomerchan/magpie-13b", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_10_03T11_48_49.581129", "path": ["**/details_harness|arc:challenge|25_2023-10-03T11-48-49.581129.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-10-03T11-48-49.581129.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_10_27T04_34_42.967550", "path": ["**/details_harness|drop|3_2023-10-27T04-34-42.967550.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-10-27T04-34-42.967550.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_10_27T04_34_42.967550", "path": ["**/details_harness|gsm8k|5_2023-10-27T04-34-42.967550.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-10-27T04-34-42.967550.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_10_03T11_48_49.581129", "path": ["**/details_harness|hellaswag|10_2023-10-03T11-48-49.581129.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-10-03T11-48-49.581129.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": 
"2023_10_03T11_48_49.581129", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-03T11-48-49.581129.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-10-03T11-48-49.581129.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-10-03T11-48-49.581129.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-10-03T11-48-49.581129.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-03T11-48-49.581129.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-10-03T11-48-49.581129.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-10-03T11-48-49.581129.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-10-03T11-48-49.581129.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-10-03T11-48-49.581129.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-10-03T11-48-49.581129.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-10-03T11-48-49.581129.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-10-03T11-48-49.581129.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-03T11-48-49.581129.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-10-03T11-48-49.581129.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-03T11-48-49.581129.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-03T11-48-49.581129.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-10-03T11-48-49.581129.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-10-03T11-48-49.581129.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-10-03T11-48-49.581129.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-03T11-48-49.581129.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-03T11-48-49.581129.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-03T11-48-49.581129.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-10-03T11-48-49.581129.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-03T11-48-49.581129.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-03T11-48-49.581129.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-03T11-48-49.581129.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-03T11-48-49.581129.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-10-03T11-48-49.581129.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-03T11-48-49.581129.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-03T11-48-49.581129.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-03T11-48-49.581129.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-03T11-48-49.581129.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-10-03T11-48-49.581129.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-10-03T11-48-49.581129.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-10-03T11-48-49.581129.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-10-03T11-48-49.581129.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-03T11-48-49.581129.parquet", 
"**/details_harness|hendrycksTest-machine_learning|5_2023-10-03T11-48-49.581129.parquet", "**/details_harness|hendrycksTest-management|5_2023-10-03T11-48-49.581129.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-10-03T11-48-49.581129.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-10-03T11-48-49.581129.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-10-03T11-48-49.581129.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-10-03T11-48-49.581129.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-03T11-48-49.581129.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-10-03T11-48-49.581129.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-10-03T11-48-49.581129.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-10-03T11-48-49.581129.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-10-03T11-48-49.581129.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-10-03T11-48-49.581129.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-10-03T11-48-49.581129.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-10-03T11-48-49.581129.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-10-03T11-48-49.581129.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-10-03T11-48-49.581129.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-10-03T11-48-49.581129.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-03T11-48-49.581129.parquet", "**/details_harness|hendrycksTest-virology|5_2023-10-03T11-48-49.581129.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-10-03T11-48-49.581129.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-03T11-48-49.581129.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-10-03T11-48-49.581129.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-10-03T11-48-49.581129.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-10-03T11-48-49.581129.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-03T11-48-49.581129.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-10-03T11-48-49.581129.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-10-03T11-48-49.581129.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-10-03T11-48-49.581129.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-10-03T11-48-49.581129.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-10-03T11-48-49.581129.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-10-03T11-48-49.581129.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-10-03T11-48-49.581129.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-03T11-48-49.581129.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-10-03T11-48-49.581129.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-03T11-48-49.581129.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-03T11-48-49.581129.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-10-03T11-48-49.581129.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-10-03T11-48-49.581129.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-10-03T11-48-49.581129.parquet", 
"**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-03T11-48-49.581129.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-03T11-48-49.581129.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-03T11-48-49.581129.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-10-03T11-48-49.581129.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-03T11-48-49.581129.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-03T11-48-49.581129.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-03T11-48-49.581129.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-03T11-48-49.581129.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-10-03T11-48-49.581129.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-03T11-48-49.581129.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-03T11-48-49.581129.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-03T11-48-49.581129.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-03T11-48-49.581129.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-10-03T11-48-49.581129.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-10-03T11-48-49.581129.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-10-03T11-48-49.581129.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-10-03T11-48-49.581129.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-03T11-48-49.581129.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-10-03T11-48-49.581129.parquet", "**/details_harness|hendrycksTest-management|5_2023-10-03T11-48-49.581129.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-10-03T11-48-49.581129.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-10-03T11-48-49.581129.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-10-03T11-48-49.581129.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-10-03T11-48-49.581129.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-03T11-48-49.581129.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-10-03T11-48-49.581129.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-10-03T11-48-49.581129.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-10-03T11-48-49.581129.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-10-03T11-48-49.581129.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-10-03T11-48-49.581129.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-10-03T11-48-49.581129.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-10-03T11-48-49.581129.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-10-03T11-48-49.581129.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-10-03T11-48-49.581129.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-10-03T11-48-49.581129.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-03T11-48-49.581129.parquet", "**/details_harness|hendrycksTest-virology|5_2023-10-03T11-48-49.581129.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-10-03T11-48-49.581129.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", 
"data_files": [{"split": "2023_10_03T11_48_49.581129", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-03T11-48-49.581129.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-03T11-48-49.581129.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_10_03T11_48_49.581129", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-10-03T11-48-49.581129.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-10-03T11-48-49.581129.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_10_03T11_48_49.581129", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-10-03T11-48-49.581129.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-10-03T11-48-49.581129.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_10_03T11_48_49.581129", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-10-03T11-48-49.581129.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-10-03T11-48-49.581129.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_10_03T11_48_49.581129", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-03T11-48-49.581129.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-03T11-48-49.581129.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_10_03T11_48_49.581129", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-10-03T11-48-49.581129.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-10-03T11-48-49.581129.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_10_03T11_48_49.581129", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-10-03T11-48-49.581129.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-10-03T11-48-49.581129.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_10_03T11_48_49.581129", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-10-03T11-48-49.581129.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-10-03T11-48-49.581129.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_10_03T11_48_49.581129", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-10-03T11-48-49.581129.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-10-03T11-48-49.581129.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_10_03T11_48_49.581129", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-10-03T11-48-49.581129.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-10-03T11-48-49.581129.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_10_03T11_48_49.581129", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-10-03T11-48-49.581129.parquet"]}, {"split": "latest", 
"path": ["**/details_harness|hendrycksTest-college_physics|5_2023-10-03T11-48-49.581129.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_10_03T11_48_49.581129", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-10-03T11-48-49.581129.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-10-03T11-48-49.581129.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_10_03T11_48_49.581129", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-03T11-48-49.581129.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-03T11-48-49.581129.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_10_03T11_48_49.581129", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-10-03T11-48-49.581129.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-10-03T11-48-49.581129.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_10_03T11_48_49.581129", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-03T11-48-49.581129.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-03T11-48-49.581129.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_10_03T11_48_49.581129", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-03T11-48-49.581129.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-03T11-48-49.581129.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_10_03T11_48_49.581129", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-10-03T11-48-49.581129.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-10-03T11-48-49.581129.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_10_03T11_48_49.581129", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-10-03T11-48-49.581129.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-10-03T11-48-49.581129.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_10_03T11_48_49.581129", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-10-03T11-48-49.581129.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-10-03T11-48-49.581129.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_10_03T11_48_49.581129", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-03T11-48-49.581129.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-03T11-48-49.581129.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_10_03T11_48_49.581129", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-03T11-48-49.581129.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-03T11-48-49.581129.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_10_03T11_48_49.581129", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-03T11-48-49.581129.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-03T11-48-49.581129.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_10_03T11_48_49.581129", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-10-03T11-48-49.581129.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-10-03T11-48-49.581129.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_10_03T11_48_49.581129", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-03T11-48-49.581129.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-03T11-48-49.581129.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_10_03T11_48_49.581129", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-03T11-48-49.581129.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-03T11-48-49.581129.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_10_03T11_48_49.581129", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-03T11-48-49.581129.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-03T11-48-49.581129.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_10_03T11_48_49.581129", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-03T11-48-49.581129.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-03T11-48-49.581129.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_10_03T11_48_49.581129", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-10-03T11-48-49.581129.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-10-03T11-48-49.581129.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_10_03T11_48_49.581129", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-03T11-48-49.581129.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-03T11-48-49.581129.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_10_03T11_48_49.581129", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-03T11-48-49.581129.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-03T11-48-49.581129.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_10_03T11_48_49.581129", "path": 
["**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-03T11-48-49.581129.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-03T11-48-49.581129.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_10_03T11_48_49.581129", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-03T11-48-49.581129.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-03T11-48-49.581129.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_10_03T11_48_49.581129", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-10-03T11-48-49.581129.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-10-03T11-48-49.581129.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_10_03T11_48_49.581129", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-10-03T11-48-49.581129.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-10-03T11-48-49.581129.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_10_03T11_48_49.581129", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-10-03T11-48-49.581129.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-10-03T11-48-49.581129.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_10_03T11_48_49.581129", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-10-03T11-48-49.581129.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-10-03T11-48-49.581129.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_10_03T11_48_49.581129", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-03T11-48-49.581129.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-03T11-48-49.581129.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_10_03T11_48_49.581129", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-10-03T11-48-49.581129.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-10-03T11-48-49.581129.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_10_03T11_48_49.581129", "path": ["**/details_harness|hendrycksTest-management|5_2023-10-03T11-48-49.581129.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-10-03T11-48-49.581129.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_10_03T11_48_49.581129", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-10-03T11-48-49.581129.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-10-03T11-48-49.581129.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_10_03T11_48_49.581129", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-10-03T11-48-49.581129.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-medical_genetics|5_2023-10-03T11-48-49.581129.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_10_03T11_48_49.581129", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-10-03T11-48-49.581129.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-10-03T11-48-49.581129.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_10_03T11_48_49.581129", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-10-03T11-48-49.581129.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-10-03T11-48-49.581129.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_10_03T11_48_49.581129", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-03T11-48-49.581129.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-03T11-48-49.581129.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_10_03T11_48_49.581129", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-10-03T11-48-49.581129.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-10-03T11-48-49.581129.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_10_03T11_48_49.581129", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-10-03T11-48-49.581129.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-10-03T11-48-49.581129.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_10_03T11_48_49.581129", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-10-03T11-48-49.581129.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-10-03T11-48-49.581129.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_10_03T11_48_49.581129", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-10-03T11-48-49.581129.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-10-03T11-48-49.581129.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_10_03T11_48_49.581129", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-10-03T11-48-49.581129.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-10-03T11-48-49.581129.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_10_03T11_48_49.581129", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-10-03T11-48-49.581129.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-10-03T11-48-49.581129.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_10_03T11_48_49.581129", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-10-03T11-48-49.581129.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-10-03T11-48-49.581129.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": 
"2023_10_03T11_48_49.581129", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-10-03T11-48-49.581129.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-10-03T11-48-49.581129.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_10_03T11_48_49.581129", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-10-03T11-48-49.581129.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-10-03T11-48-49.581129.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_10_03T11_48_49.581129", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-10-03T11-48-49.581129.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-10-03T11-48-49.581129.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_10_03T11_48_49.581129", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-03T11-48-49.581129.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-03T11-48-49.581129.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_10_03T11_48_49.581129", "path": ["**/details_harness|hendrycksTest-virology|5_2023-10-03T11-48-49.581129.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-10-03T11-48-49.581129.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_10_03T11_48_49.581129", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-10-03T11-48-49.581129.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-10-03T11-48-49.581129.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_10_03T11_48_49.581129", "path": ["**/details_harness|truthfulqa:mc|0_2023-10-03T11-48-49.581129.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-10-03T11-48-49.581129.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_10_27T04_34_42.967550", "path": ["**/details_harness|winogrande|5_2023-10-27T04-34-42.967550.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-10-27T04-34-42.967550.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_10_03T11_48_49.581129", "path": ["results_2023-10-03T11-48-49.581129.parquet"]}, {"split": "2023_10_27T04_34_42.967550", "path": ["results_2023-10-27T04-34-42.967550.parquet"]}, {"split": "latest", "path": ["results_2023-10-27T04-34-42.967550.parquet"]}]}]}
2023-10-27T03:34:56+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of boomerchan/magpie-13b ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model boomerchan/magpie-13b on the Open LLM Leaderboard. The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-10-27T04:34:42.967550 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
[ "# Dataset Card for Evaluation run of boomerchan/magpie-13b", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model boomerchan/magpie-13b on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-27T04:34:42.967550(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of boomerchan/magpie-13b", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model boomerchan/magpie-13b on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-27T04:34:42.967550(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 17, 31, 165, 66, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of boomerchan/magpie-13b## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model boomerchan/magpie-13b on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-10-27T04:34:42.967550(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
9a6f8e46e71def184b58bb8126cad24d3ae28eea
# Dataset Card for "gpt_target_group_v1-1" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
datazeit/gpt_target_group_v1-1
[ "region:us" ]
2023-10-03T10:54:37+00:00
{"dataset_info": {"features": [{"name": "instruction", "dtype": "string"}, {"name": "output", "dtype": "string"}, {"name": "text", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 11289432, "num_examples": 4452}], "download_size": 0, "dataset_size": 11289432}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]}
2023-10-03T12:06:28+00:00
[]
[]
TAGS #region-us
# Dataset Card for "gpt_target_group_v1-1" More Information needed
[ "# Dataset Card for \"gpt_target_group_v1-1\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"gpt_target_group_v1-1\"\n\nMore Information needed" ]
[ 6, 20 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"gpt_target_group_v1-1\"\n\nMore Information needed" ]
11a231a2b4b32f7c2ec8739079a41a80997cb7b3
# Dataset Card for "medQA_test" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
hippocrates/medQA_test
[ "region:us" ]
2023-10-03T11:12:56+00:00
{"configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "valid", "path": "data/valid-*"}, {"split": "test", "path": "data/test-*"}]}], "dataset_info": {"features": [{"name": "id", "dtype": "string"}, {"name": "query", "dtype": "string"}, {"name": "answer", "dtype": "string"}, {"name": "choices", "sequence": "string"}, {"name": "gold", "dtype": "int64"}], "splits": [{"name": "train", "num_bytes": 14882350, "num_examples": 10178}, {"name": "valid", "num_bytes": 1858943, "num_examples": 1272}, {"name": "test", "num_bytes": 1887041, "num_examples": 1273}], "download_size": 7375278, "dataset_size": 18628334}}
2024-01-25T16:50:53+00:00
[]
[]
TAGS #region-us
# Dataset Card for "medQA_test" More Information needed
[ "# Dataset Card for \"medQA_test\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"medQA_test\"\n\nMore Information needed" ]
[ 6, 14 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"medQA_test\"\n\nMore Information needed" ]
57c706d6d70e7bdff30e9d90a0519197e7166c46
Q&A data from the CSDN C Language Community, covering **2020.10.2 ~ 2023.10.2**. Images are not included; the data is text content only.

There are **2380** entries in total. The data has gone through **initial cleaning and anonymization**: all posts with 0 replies and all bot-reply posts have been removed. To support different use cases, the data is organized in the forum's threaded-reply format; one (expanded) sample looks like this:

```json
{
    "question": "刚学C语言,为什么这个代码运行不了呢",
    "poster": "user-0",
    "comments": [
        {
            "cid": "2",
            "user": "user-2",
            "content": "intunsigned intlong longunsigned long long统统容纳不下29的阶乘,早就溢出了。",
            "referer": "user-0"
        },
        {
            "cid": "3",
            "user": "user-3",
            "content": "#include <stdio.h> #include <math.h> int main(void) { int i = 1; long long sum = 1; // 使用 long long 类型来存储阶乘结果 int x; printf(\"请输入一个非负整数: \"); if (scanf(\"%d\", &x) != 1 || x < 0) { printf(\"输入无效,请输入一个非负整数。\\n\"); return 1; // 返回错误码 } while (i <= x) { sum *= i; i++; } printf(\"%d 的阶乘是 %lld\\n\", x, sum); return 0; }",
            "referer": "user-0"
        }
    ]
}
```

`user` and `referer` have been mapped for anonymization, but the logical reply relationships are preserved (i.e., both replies to the original poster and nested replies within a thread keep their links).

Both `question` and `comment` are organized in single-line form, so no extra processing is needed.

Because some answers are quite long, and long-form text may be needed, the data has not been trimmed. The exact quantiles of the data are listed below; please trim as needed:

```
       question     comments
count  2380.000000  2380.000000
mean   22.074370    1528.050840
std    14.986499    2608.022392
min    4.000000     69.000000
10%    7.900000     160.900000
20%    12.000000    235.800000
30%    14.000000    342.000000
40%    16.000000    469.000000
50%    18.000000    648.500000
60%    21.000000    889.000000
70%    25.000000    1234.300000
75%    27.000000    1542.500000
80%    30.000000    1990.400000
85%    34.000000    2665.800000
90%    40.000000    3810.800000
95%    51.000000    6008.050000
max    130.000000   30606.000000
```
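For readers who want to apply the suggested quantile-based trimming, here is a minimal, hypothetical loading sketch. The card does not state the distribution file name or format, so the JSON Lines file name below is an assumption; the record fields (`question`, `comments`, `content`) follow the sample above, and the 1542.5 cutoff is simply the 75% quantile of total comment length from the table:

```python
import json

# Assumption: the dataset ships as one JSON record per line ("data.jsonl");
# the actual file name/format is not stated on the card.
MAX_COMMENT_CHARS = 1542.5  # 75% quantile of total comment length (see table)

kept = []
with open("data.jsonl", encoding="utf-8") as f:
    for line in f:
        record = json.loads(line)
        # Total character count of all comment bodies in this thread.
        total_len = sum(len(c["content"]) for c in record["comments"])
        if total_len <= MAX_COMMENT_CHARS:
            kept.append(record)

print(f"kept {len(kept)} threads after trimming")
```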
Mxode/CSDN-Community-C-Language-3years
[ "task_categories:question-answering", "task_categories:conversational", "task_categories:text-generation", "size_categories:1K<n<10K", "language:zh", "license:lgpl", "code", "region:us" ]
2023-10-03T11:20:18+00:00
{"language": ["zh"], "license": "lgpl", "size_categories": ["1K<n<10K"], "task_categories": ["question-answering", "conversational", "text-generation"], "tags": ["code"]}
2023-10-03T11:36:23+00:00
[]
[ "zh" ]
TAGS #task_categories-question-answering #task_categories-conversational #task_categories-text-generation #size_categories-1K<n<10K #language-Chinese #license-lgpl #code #region-us
Q&A data from the CSDN C Language Community, covering 2020.10.2 ~ 2023.10.2. Images are not included; the data is text content only. There are 2380 entries in total. The data has gone through initial cleaning and anonymization: all posts with 0 replies and all bot-reply posts have been removed. To support different use cases, the data is organized in the forum's threaded-reply format; one (expanded) sample looks like this: 'user' and 'referer' have been mapped for anonymization, but the logical reply relationships are preserved (i.e., both replies to the original poster and nested replies within a thread keep their links). Both 'question' and 'comment' are organized in single-line form, so no extra processing is needed. Because some answers are quite long, and long-form text may be needed, the data has not been trimmed. The exact quantiles of the data are listed below; please trim as needed:
[]
[ "TAGS\n#task_categories-question-answering #task_categories-conversational #task_categories-text-generation #size_categories-1K<n<10K #language-Chinese #license-lgpl #code #region-us \n" ]
[ 64 ]
[ "passage: TAGS\n#task_categories-question-answering #task_categories-conversational #task_categories-text-generation #size_categories-1K<n<10K #language-Chinese #license-lgpl #code #region-us \n" ]
036fe41b9eb275709a1a07fed5ed3f4c2716687e
# Dataset Card for "cm4-synthetic-testing-with-embeddings" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
HuggingFaceM4/cm4-synthetic-testing-with-embeddings
[ "region:us" ]
2023-10-03T11:23:54+00:00
{"dataset_info": [{"config_name": "100.unique.embeddings", "features": [{"name": "texts", "sequence": "string"}, {"name": "metadata", "dtype": "string"}, {"name": "original_idx", "dtype": "int64"}, {"name": "image_embeddings", "sequence": {"sequence": {"sequence": "float64"}}}], "splits": [{"name": "train", "num_bytes": 15422178, "num_examples": 100}], "download_size": 15204174, "dataset_size": 15422178}, {"config_name": "100.unique.pixels", "features": [{"name": "texts", "sequence": "string"}, {"name": "images", "sequence": "image"}, {"name": "metadata", "dtype": "string"}, {"name": "original_idx", "dtype": "int64"}], "splits": [{"name": "train", "num_bytes": 7278379.0, "num_examples": 100}], "download_size": 6801949, "dataset_size": 7278379.0}], "configs": [{"config_name": "100.unique.embeddings", "data_files": [{"split": "train", "path": "100.unique.embeddings/train-*"}]}, {"config_name": "100.unique.pixels", "data_files": [{"split": "train", "path": "100.unique.pixels/train-*"}]}]}
2023-10-03T11:25:35+00:00
[]
[]
TAGS #region-us
# Dataset Card for "cm4-synthetic-testing-with-embeddings" More Information needed
[ "# Dataset Card for \"cm4-synthetic-testing-with-embeddings\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"cm4-synthetic-testing-with-embeddings\"\n\nMore Information needed" ]
[ 6, 24 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"cm4-synthetic-testing-with-embeddings\"\n\nMore Information needed" ]
97648871f703de05628a7dd19ca6551351a666b9
# Dataset Card for "medMCQA_test" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
hippocrates/medMCQA_test
[ "region:us" ]
2023-10-03T11:29:51+00:00
{"configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "valid", "path": "data/valid-*"}, {"split": "test", "path": "data/test-*"}]}], "dataset_info": {"features": [{"name": "id", "dtype": "string"}, {"name": "query", "dtype": "string"}, {"name": "answer", "dtype": "string"}, {"name": "choices", "sequence": "string"}, {"name": "gold", "dtype": "int64"}], "splits": [{"name": "train", "num_bytes": 122684920, "num_examples": 182822}, {"name": "valid", "num_bytes": 2873180, "num_examples": 4183}, {"name": "test", "num_bytes": 3969491, "num_examples": 6150}], "download_size": 31113834, "dataset_size": 129527591}}
2024-01-25T16:48:32+00:00
[]
[]
TAGS #region-us
# Dataset Card for "medMCQA_test" More Information needed
[ "# Dataset Card for \"medMCQA_test\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"medMCQA_test\"\n\nMore Information needed" ]
[ 6, 15 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"medMCQA_test\"\n\nMore Information needed" ]
cd6cd4001aef4e393c46095ad0bc5e146948e40a
# Dataset Card for "3d_perspective_drawing" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
Falah/3d_perspective_drawing
[ "region:us" ]
2023-10-03T11:36:17+00:00
{"dataset_info": {"features": [{"name": "prompts", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 174080, "num_examples": 1000}], "download_size": 18501, "dataset_size": 174080}}
2023-10-03T11:36:18+00:00
[]
[]
TAGS #region-us
# Dataset Card for "3d_perspective_drawing" More Information needed
[ "# Dataset Card for \"3d_perspective_drawing\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"3d_perspective_drawing\"\n\nMore Information needed" ]
[ 6, 19 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"3d_perspective_drawing\"\n\nMore Information needed" ]
295f25ad2a2456142c53f80550b6ae028e32bcd2
# Dataset Card for "llama2d-unscramble" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
llama2d/llama2d-unscramble
[ "region:us" ]
2023-10-03T12:01:32+00:00
{"dataset_info": {"features": [{"name": "input_ids", "sequence": "float32"}, {"name": "coords", "sequence": {"sequence": "float32"}}, {"name": "labels", "sequence": "float32"}, {"name": "attention_mask", "sequence": "float32"}], "splits": [{"name": "train", "num_bytes": 30080000, "num_examples": 5000}], "download_size": 0, "dataset_size": 30080000}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]}
2023-10-07T01:15:20+00:00
[]
[]
TAGS #region-us
# Dataset Card for "llama2d-unscramble" More Information needed
[ "# Dataset Card for \"llama2d-unscramble\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"llama2d-unscramble\"\n\nMore Information needed" ]
[ 6, 19 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"llama2d-unscramble\"\n\nMore Information needed" ]
91fb34d8a3b3a04138dfb3529b982ad6df678118
# Dataset Card for Evaluation run of Undi95/MLewd-ReMM-L2-Chat-20B

## Dataset Description

- **Homepage:** 
- **Repository:** https://huggingface.co/Undi95/MLewd-ReMM-L2-Chat-20B
- **Paper:** 
- **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
- **Point of Contact:** [email protected]

### Dataset Summary

Dataset automatically created during the evaluation run of model [Undi95/MLewd-ReMM-L2-Chat-20B](https://huggingface.co/Undi95/MLewd-ReMM-L2-Chat-20B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).

The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).

To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_Undi95__MLewd-ReMM-L2-Chat-20B",
	"harness_winogrande_5",
	split="train")
```

## Latest results

These are the [latest results from run 2023-10-23T21:41:03.684290](https://huggingface.co/datasets/open-llm-leaderboard/details_Undi95__MLewd-ReMM-L2-Chat-20B/blob/main/results_2023-10-23T21-41-03.684290.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval):

```python
{
    "all": {
        "em": 0.1294043624161074,
        "em_stderr": 0.0034373389026090095,
        "f1": 0.22332843959731472,
        "f1_stderr": 0.003630049548732814,
        "acc": 0.4405378736970978,
        "acc_stderr": 0.01019155255908737
    },
    "harness|drop|3": {
        "em": 0.1294043624161074,
        "em_stderr": 0.0034373389026090095,
        "f1": 0.22332843959731472,
        "f1_stderr": 0.003630049548732814
    },
    "harness|gsm8k|5": {
        "acc": 0.10917361637604246,
        "acc_stderr": 0.008590089300511142
    },
    "harness|winogrande|5": {
        "acc": 0.7719021310181531,
        "acc_stderr": 0.011793015817663595
    }
}
```

### Supported Tasks and Leaderboards

[More Information Needed]

### Languages

[More Information Needed]

## Dataset Structure

### Data Instances

[More Information Needed]

### Data Fields

[More Information Needed]

### Data Splits

[More Information Needed]

## Dataset Creation

### Curation Rationale

[More Information Needed]

### Source Data

#### Initial Data Collection and Normalization

[More Information Needed]

#### Who are the source language producers?

[More Information Needed]

### Annotations

#### Annotation process

[More Information Needed]

#### Who are the annotators?

[More Information Needed]

### Personal and Sensitive Information

[More Information Needed]

## Considerations for Using the Data

### Social Impact of Dataset

[More Information Needed]

### Discussion of Biases

[More Information Needed]

### Other Known Limitations

[More Information Needed]

## Additional Information

### Dataset Curators

[More Information Needed]

### Licensing Information

[More Information Needed]

### Citation Information

[More Information Needed]

### Contributions

[More Information Needed]
open-llm-leaderboard/details_Undi95__MLewd-ReMM-L2-Chat-20B
[ "region:us" ]
2023-10-03T12:01:33+00:00
{"pretty_name": "Evaluation run of Undi95/MLewd-ReMM-L2-Chat-20B", "dataset_summary": "Dataset automatically created during the evaluation run of model [Undi95/MLewd-ReMM-L2-Chat-20B](https://huggingface.co/Undi95/MLewd-ReMM-L2-Chat-20B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_Undi95__MLewd-ReMM-L2-Chat-20B\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-10-23T21:41:03.684290](https://huggingface.co/datasets/open-llm-leaderboard/details_Undi95__MLewd-ReMM-L2-Chat-20B/blob/main/results_2023-10-23T21-41-03.684290.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.1294043624161074,\n \"em_stderr\": 0.0034373389026090095,\n \"f1\": 0.22332843959731472,\n \"f1_stderr\": 0.003630049548732814,\n \"acc\": 0.4405378736970978,\n \"acc_stderr\": 0.01019155255908737\n },\n \"harness|drop|3\": {\n \"em\": 0.1294043624161074,\n \"em_stderr\": 0.0034373389026090095,\n \"f1\": 0.22332843959731472,\n \"f1_stderr\": 0.003630049548732814\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.10917361637604246,\n \"acc_stderr\": 0.008590089300511142\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7719021310181531,\n \"acc_stderr\": 0.011793015817663595\n }\n}\n```", "repo_url": "https://huggingface.co/Undi95/MLewd-ReMM-L2-Chat-20B", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_10_03T13_01_09.823619", "path": ["**/details_harness|arc:challenge|25_2023-10-03T13-01-09.823619.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-10-03T13-01-09.823619.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_10_23T21_41_03.684290", "path": ["**/details_harness|drop|3_2023-10-23T21-41-03.684290.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-10-23T21-41-03.684290.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_10_23T21_41_03.684290", "path": ["**/details_harness|gsm8k|5_2023-10-23T21-41-03.684290.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-10-23T21-41-03.684290.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_10_03T13_01_09.823619", "path": ["**/details_harness|hellaswag|10_2023-10-03T13-01-09.823619.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-10-03T13-01-09.823619.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", 
"data_files": [{"split": "2023_10_03T13_01_09.823619", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-03T13-01-09.823619.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-10-03T13-01-09.823619.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-10-03T13-01-09.823619.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-10-03T13-01-09.823619.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-03T13-01-09.823619.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-10-03T13-01-09.823619.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-10-03T13-01-09.823619.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-10-03T13-01-09.823619.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-10-03T13-01-09.823619.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-10-03T13-01-09.823619.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-10-03T13-01-09.823619.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-10-03T13-01-09.823619.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-03T13-01-09.823619.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-10-03T13-01-09.823619.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-03T13-01-09.823619.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-03T13-01-09.823619.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-10-03T13-01-09.823619.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-10-03T13-01-09.823619.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-10-03T13-01-09.823619.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-03T13-01-09.823619.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-03T13-01-09.823619.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-03T13-01-09.823619.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-10-03T13-01-09.823619.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-03T13-01-09.823619.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-03T13-01-09.823619.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-03T13-01-09.823619.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-03T13-01-09.823619.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-10-03T13-01-09.823619.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-03T13-01-09.823619.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-03T13-01-09.823619.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-03T13-01-09.823619.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-03T13-01-09.823619.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-10-03T13-01-09.823619.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-10-03T13-01-09.823619.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-10-03T13-01-09.823619.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-10-03T13-01-09.823619.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-03T13-01-09.823619.parquet", 
"**/details_harness|hendrycksTest-machine_learning|5_2023-10-03T13-01-09.823619.parquet", "**/details_harness|hendrycksTest-management|5_2023-10-03T13-01-09.823619.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-10-03T13-01-09.823619.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-10-03T13-01-09.823619.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-10-03T13-01-09.823619.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-10-03T13-01-09.823619.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-03T13-01-09.823619.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-10-03T13-01-09.823619.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-10-03T13-01-09.823619.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-10-03T13-01-09.823619.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-10-03T13-01-09.823619.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-10-03T13-01-09.823619.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-10-03T13-01-09.823619.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-10-03T13-01-09.823619.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-10-03T13-01-09.823619.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-10-03T13-01-09.823619.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-10-03T13-01-09.823619.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-03T13-01-09.823619.parquet", "**/details_harness|hendrycksTest-virology|5_2023-10-03T13-01-09.823619.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-10-03T13-01-09.823619.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-03T13-01-09.823619.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-10-03T13-01-09.823619.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-10-03T13-01-09.823619.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-10-03T13-01-09.823619.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-03T13-01-09.823619.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-10-03T13-01-09.823619.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-10-03T13-01-09.823619.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-10-03T13-01-09.823619.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-10-03T13-01-09.823619.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-10-03T13-01-09.823619.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-10-03T13-01-09.823619.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-10-03T13-01-09.823619.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-03T13-01-09.823619.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-10-03T13-01-09.823619.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-03T13-01-09.823619.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-03T13-01-09.823619.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-10-03T13-01-09.823619.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-10-03T13-01-09.823619.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-10-03T13-01-09.823619.parquet", 
"**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-03T13-01-09.823619.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-03T13-01-09.823619.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-03T13-01-09.823619.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-10-03T13-01-09.823619.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-03T13-01-09.823619.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-03T13-01-09.823619.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-03T13-01-09.823619.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-03T13-01-09.823619.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-10-03T13-01-09.823619.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-03T13-01-09.823619.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-03T13-01-09.823619.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-03T13-01-09.823619.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-03T13-01-09.823619.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-10-03T13-01-09.823619.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-10-03T13-01-09.823619.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-10-03T13-01-09.823619.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-10-03T13-01-09.823619.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-03T13-01-09.823619.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-10-03T13-01-09.823619.parquet", "**/details_harness|hendrycksTest-management|5_2023-10-03T13-01-09.823619.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-10-03T13-01-09.823619.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-10-03T13-01-09.823619.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-10-03T13-01-09.823619.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-10-03T13-01-09.823619.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-03T13-01-09.823619.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-10-03T13-01-09.823619.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-10-03T13-01-09.823619.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-10-03T13-01-09.823619.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-10-03T13-01-09.823619.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-10-03T13-01-09.823619.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-10-03T13-01-09.823619.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-10-03T13-01-09.823619.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-10-03T13-01-09.823619.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-10-03T13-01-09.823619.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-10-03T13-01-09.823619.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-03T13-01-09.823619.parquet", "**/details_harness|hendrycksTest-virology|5_2023-10-03T13-01-09.823619.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-10-03T13-01-09.823619.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", 
"data_files": [{"split": "2023_10_03T13_01_09.823619", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-03T13-01-09.823619.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-03T13-01-09.823619.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_10_03T13_01_09.823619", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-10-03T13-01-09.823619.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-10-03T13-01-09.823619.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_10_03T13_01_09.823619", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-10-03T13-01-09.823619.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-10-03T13-01-09.823619.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_10_03T13_01_09.823619", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-10-03T13-01-09.823619.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-10-03T13-01-09.823619.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_10_03T13_01_09.823619", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-03T13-01-09.823619.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-03T13-01-09.823619.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_10_03T13_01_09.823619", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-10-03T13-01-09.823619.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-10-03T13-01-09.823619.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_10_03T13_01_09.823619", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-10-03T13-01-09.823619.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-10-03T13-01-09.823619.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_10_03T13_01_09.823619", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-10-03T13-01-09.823619.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-10-03T13-01-09.823619.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_10_03T13_01_09.823619", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-10-03T13-01-09.823619.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-10-03T13-01-09.823619.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_10_03T13_01_09.823619", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-10-03T13-01-09.823619.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-10-03T13-01-09.823619.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_10_03T13_01_09.823619", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-10-03T13-01-09.823619.parquet"]}, {"split": "latest", 
"path": ["**/details_harness|hendrycksTest-college_physics|5_2023-10-03T13-01-09.823619.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_10_03T13_01_09.823619", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-10-03T13-01-09.823619.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-10-03T13-01-09.823619.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_10_03T13_01_09.823619", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-03T13-01-09.823619.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-03T13-01-09.823619.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_10_03T13_01_09.823619", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-10-03T13-01-09.823619.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-10-03T13-01-09.823619.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_10_03T13_01_09.823619", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-03T13-01-09.823619.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-03T13-01-09.823619.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_10_03T13_01_09.823619", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-03T13-01-09.823619.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-03T13-01-09.823619.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_10_03T13_01_09.823619", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-10-03T13-01-09.823619.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-10-03T13-01-09.823619.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_10_03T13_01_09.823619", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-10-03T13-01-09.823619.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-10-03T13-01-09.823619.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_10_03T13_01_09.823619", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-10-03T13-01-09.823619.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-10-03T13-01-09.823619.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_10_03T13_01_09.823619", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-03T13-01-09.823619.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-03T13-01-09.823619.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_10_03T13_01_09.823619", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-03T13-01-09.823619.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-03T13-01-09.823619.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_10_03T13_01_09.823619", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-03T13-01-09.823619.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-03T13-01-09.823619.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_10_03T13_01_09.823619", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-10-03T13-01-09.823619.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-10-03T13-01-09.823619.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_10_03T13_01_09.823619", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-03T13-01-09.823619.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-03T13-01-09.823619.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_10_03T13_01_09.823619", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-03T13-01-09.823619.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-03T13-01-09.823619.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_10_03T13_01_09.823619", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-03T13-01-09.823619.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-03T13-01-09.823619.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_10_03T13_01_09.823619", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-03T13-01-09.823619.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-03T13-01-09.823619.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_10_03T13_01_09.823619", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-10-03T13-01-09.823619.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-10-03T13-01-09.823619.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_10_03T13_01_09.823619", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-03T13-01-09.823619.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-03T13-01-09.823619.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_10_03T13_01_09.823619", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-03T13-01-09.823619.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-03T13-01-09.823619.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_10_03T13_01_09.823619", "path": 
["**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-03T13-01-09.823619.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-03T13-01-09.823619.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_10_03T13_01_09.823619", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-03T13-01-09.823619.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-03T13-01-09.823619.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_10_03T13_01_09.823619", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-10-03T13-01-09.823619.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-10-03T13-01-09.823619.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_10_03T13_01_09.823619", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-10-03T13-01-09.823619.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-10-03T13-01-09.823619.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_10_03T13_01_09.823619", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-10-03T13-01-09.823619.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-10-03T13-01-09.823619.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_10_03T13_01_09.823619", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-10-03T13-01-09.823619.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-10-03T13-01-09.823619.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_10_03T13_01_09.823619", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-03T13-01-09.823619.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-03T13-01-09.823619.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_10_03T13_01_09.823619", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-10-03T13-01-09.823619.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-10-03T13-01-09.823619.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_10_03T13_01_09.823619", "path": ["**/details_harness|hendrycksTest-management|5_2023-10-03T13-01-09.823619.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-10-03T13-01-09.823619.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_10_03T13_01_09.823619", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-10-03T13-01-09.823619.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-10-03T13-01-09.823619.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_10_03T13_01_09.823619", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-10-03T13-01-09.823619.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-medical_genetics|5_2023-10-03T13-01-09.823619.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_10_03T13_01_09.823619", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-10-03T13-01-09.823619.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-10-03T13-01-09.823619.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_10_03T13_01_09.823619", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-10-03T13-01-09.823619.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-10-03T13-01-09.823619.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_10_03T13_01_09.823619", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-03T13-01-09.823619.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-03T13-01-09.823619.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_10_03T13_01_09.823619", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-10-03T13-01-09.823619.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-10-03T13-01-09.823619.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_10_03T13_01_09.823619", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-10-03T13-01-09.823619.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-10-03T13-01-09.823619.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_10_03T13_01_09.823619", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-10-03T13-01-09.823619.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-10-03T13-01-09.823619.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_10_03T13_01_09.823619", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-10-03T13-01-09.823619.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-10-03T13-01-09.823619.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_10_03T13_01_09.823619", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-10-03T13-01-09.823619.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-10-03T13-01-09.823619.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_10_03T13_01_09.823619", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-10-03T13-01-09.823619.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-10-03T13-01-09.823619.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_10_03T13_01_09.823619", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-10-03T13-01-09.823619.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-10-03T13-01-09.823619.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": 
"2023_10_03T13_01_09.823619", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-10-03T13-01-09.823619.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-10-03T13-01-09.823619.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_10_03T13_01_09.823619", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-10-03T13-01-09.823619.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-10-03T13-01-09.823619.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_10_03T13_01_09.823619", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-10-03T13-01-09.823619.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-10-03T13-01-09.823619.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_10_03T13_01_09.823619", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-03T13-01-09.823619.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-03T13-01-09.823619.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_10_03T13_01_09.823619", "path": ["**/details_harness|hendrycksTest-virology|5_2023-10-03T13-01-09.823619.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-10-03T13-01-09.823619.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_10_03T13_01_09.823619", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-10-03T13-01-09.823619.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-10-03T13-01-09.823619.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_10_03T13_01_09.823619", "path": ["**/details_harness|truthfulqa:mc|0_2023-10-03T13-01-09.823619.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-10-03T13-01-09.823619.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_10_23T21_41_03.684290", "path": ["**/details_harness|winogrande|5_2023-10-23T21-41-03.684290.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-10-23T21-41-03.684290.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_10_03T13_01_09.823619", "path": ["results_2023-10-03T13-01-09.823619.parquet"]}, {"split": "2023_10_23T21_41_03.684290", "path": ["results_2023-10-23T21-41-03.684290.parquet"]}, {"split": "latest", "path": ["results_2023-10-23T21-41-03.684290.parquet"]}]}]}
2023-10-23T20:41:16+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of Undi95/MLewd-ReMM-L2-Chat-20B ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model Undi95/MLewd-ReMM-L2-Chat-20B on the Open LLM Leaderboard. The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-10-23T21:41:03.684290 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks; you can find each in the results and the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
[ "# Dataset Card for Evaluation run of Undi95/MLewd-ReMM-L2-Chat-20B", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model Undi95/MLewd-ReMM-L2-Chat-20B on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-23T21:41:03.684290(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of Undi95/MLewd-ReMM-L2-Chat-20B", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model Undi95/MLewd-ReMM-L2-Chat-20B on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-23T21:41:03.684290(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 25, 31, 173, 66, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of Undi95/MLewd-ReMM-L2-Chat-20B## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model Undi95/MLewd-ReMM-L2-Chat-20B on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-10-23T21:41:03.684290(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
85d60317d39a58f2317ab73227d78757131618ce
# Dataset Card for "SQL_CleanedKaggle" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
AayushShah/SQL_CleanedKaggle
[ "region:us" ]
2023-10-03T12:09:51+00:00
{"configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "test", "path": "data/test-*"}]}], "dataset_info": {"features": [{"name": "input_ids", "sequence": "int32"}, {"name": "attention_mask", "sequence": "int8"}, {"name": "labels", "sequence": "int64"}], "splits": [{"name": "train", "num_bytes": 958625276.0, "num_examples": 266581}, {"name": "test", "num_bytes": 106517116.0, "num_examples": 29621}], "download_size": 50495032, "dataset_size": 1065142392.0}}
2023-10-03T12:15:09+00:00
[]
[]
TAGS #region-us
# Dataset Card for "SQL_CleanedKaggle" More Information needed
[ "# Dataset Card for \"SQL_CleanedKaggle\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"SQL_CleanedKaggle\"\n\nMore Information needed" ]
[ 6, 17 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"SQL_CleanedKaggle\"\n\nMore Information needed" ]
b00d4933d1b4e6ca6b82a55d01cdfe03089f8180
# Dataset Card for "lemmatized-wikitext" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
yavasde/lemmatized-wikitext2
[ "region:us" ]
2023-10-03T12:16:17+00:00
{"dataset_info": {"features": [{"name": "text", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 2652445, "num_examples": 23767}, {"name": "test", "num_bytes": 313242, "num_examples": 2891}, {"name": "valid", "num_bytes": 284363, "num_examples": 2461}], "download_size": 1949711, "dataset_size": 3250050}}
2023-10-03T12:16:22+00:00
[]
[]
TAGS #region-us
# Dataset Card for "lemmatized-wikitext" More Information needed
[ "# Dataset Card for \"lemmatized-wikitext\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"lemmatized-wikitext\"\n\nMore Information needed" ]
[ 6, 16 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"lemmatized-wikitext\"\n\nMore Information needed" ]
4b4c7782e3e9069dc073a4c9367c6d1384e86af9
# Dataset Card for Evaluation run of CobraMamba/mamba-gpt-7b

## Dataset Description

- **Homepage:** 
- **Repository:** https://huggingface.co/CobraMamba/mamba-gpt-7b
- **Paper:** 
- **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
- **Point of Contact:** [email protected]

### Dataset Summary

Dataset automatically created during the evaluation run of model [CobraMamba/mamba-gpt-7b](https://huggingface.co/CobraMamba/mamba-gpt-7b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).

The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).

To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_CobraMamba__mamba-gpt-7b",
	"harness_winogrande_5",
	split="train")
```

## Latest results

These are the [latest results from run 2023-10-24T09:03:51.727518](https://huggingface.co/datasets/open-llm-leaderboard/details_CobraMamba__mamba-gpt-7b/blob/main/results_2023-10-24T09-03-51.727518.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks; you can find each in the results and the "latest" split for each eval):

```python
{
    "all": {
        "em": 0.012269295302013422,
        "em_stderr": 0.001127375878187339,
        "f1": 0.06721057046979886,
        "f1_stderr": 0.00169632190136596,
        "acc": 0.4182206151008902,
        "acc_stderr": 0.010804482569529197
    },
    "harness|drop|3": {
        "em": 0.012269295302013422,
        "em_stderr": 0.001127375878187339,
        "f1": 0.06721057046979886,
        "f1_stderr": 0.00169632190136596
    },
    "harness|gsm8k|5": {
        "acc": 0.1197877179681577,
        "acc_stderr": 0.00894421340355307
    },
    "harness|winogrande|5": {
        "acc": 0.7166535122336227,
        "acc_stderr": 0.012664751735505323
    }
}
```

### Supported Tasks and Leaderboards

[More Information Needed]

### Languages

[More Information Needed]

## Dataset Structure

### Data Instances

[More Information Needed]

### Data Fields

[More Information Needed]

### Data Splits

[More Information Needed]

## Dataset Creation

### Curation Rationale

[More Information Needed]

### Source Data

#### Initial Data Collection and Normalization

[More Information Needed]

#### Who are the source language producers?

[More Information Needed]

### Annotations

#### Annotation process

[More Information Needed]

#### Who are the annotators?

[More Information Needed]

### Personal and Sensitive Information

[More Information Needed]

## Considerations for Using the Data

### Social Impact of Dataset

[More Information Needed]

### Discussion of Biases

[More Information Needed]

### Other Known Limitations

[More Information Needed]

## Additional Information

### Dataset Curators

[More Information Needed]

### Licensing Information

[More Information Needed]

### Citation Information

[More Information Needed]

### Contributions

[More Information Needed]
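Beyond loading a single task, the 64 configurations and the two timestamped runs can be enumerated programmatically. A short sketch, assuming the config names follow the same layout as the other details repos (the "results" row schema mirroring the JSON above is an assumption, not documented here):

```python
from datasets import get_dataset_config_names, load_dataset

repo = "open-llm-leaderboard/details_CobraMamba__mamba-gpt-7b"

# Enumerate all per-task configurations (64 according to the card).
configs = get_dataset_config_names(repo)
print(len(configs), configs[:5])

# The "results" config keeps one split per run plus a "latest" alias,
# which here points at the 2023-10-24 run.
results = load_dataset(repo, "results", split="latest")
print(results[0])  # aggregated metrics for the newest run
```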
open-llm-leaderboard/details_CobraMamba__mamba-gpt-7b
[ "region:us" ]
2023-10-03T12:22:46+00:00
{"pretty_name": "Evaluation run of CobraMamba/mamba-gpt-7b", "dataset_summary": "Dataset automatically created during the evaluation run of model [CobraMamba/mamba-gpt-7b](https://huggingface.co/CobraMamba/mamba-gpt-7b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_CobraMamba__mamba-gpt-7b\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-10-24T09:03:51.727518](https://huggingface.co/datasets/open-llm-leaderboard/details_CobraMamba__mamba-gpt-7b/blob/main/results_2023-10-24T09-03-51.727518.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.012269295302013422,\n \"em_stderr\": 0.001127375878187339,\n \"f1\": 0.06721057046979886,\n \"f1_stderr\": 0.00169632190136596,\n \"acc\": 0.4182206151008902,\n \"acc_stderr\": 0.010804482569529197\n },\n \"harness|drop|3\": {\n \"em\": 0.012269295302013422,\n \"em_stderr\": 0.001127375878187339,\n \"f1\": 0.06721057046979886,\n \"f1_stderr\": 0.00169632190136596\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.1197877179681577,\n \"acc_stderr\": 0.00894421340355307\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7166535122336227,\n \"acc_stderr\": 0.012664751735505323\n }\n}\n```", "repo_url": "https://huggingface.co/CobraMamba/mamba-gpt-7b", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_10_03T13_22_21.722990", "path": ["**/details_harness|arc:challenge|25_2023-10-03T13-22-21.722990.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-10-03T13-22-21.722990.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_10_24T09_03_51.727518", "path": ["**/details_harness|drop|3_2023-10-24T09-03-51.727518.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-10-24T09-03-51.727518.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_10_24T09_03_51.727518", "path": ["**/details_harness|gsm8k|5_2023-10-24T09-03-51.727518.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-10-24T09-03-51.727518.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_10_03T13_22_21.722990", "path": ["**/details_harness|hellaswag|10_2023-10-03T13-22-21.722990.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-10-03T13-22-21.722990.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": 
"2023_10_03T13_22_21.722990", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-03T13-22-21.722990.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-10-03T13-22-21.722990.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-10-03T13-22-21.722990.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-10-03T13-22-21.722990.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-03T13-22-21.722990.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-10-03T13-22-21.722990.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-10-03T13-22-21.722990.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-10-03T13-22-21.722990.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-10-03T13-22-21.722990.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-10-03T13-22-21.722990.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-10-03T13-22-21.722990.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-10-03T13-22-21.722990.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-03T13-22-21.722990.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-10-03T13-22-21.722990.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-03T13-22-21.722990.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-03T13-22-21.722990.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-10-03T13-22-21.722990.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-10-03T13-22-21.722990.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-10-03T13-22-21.722990.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-03T13-22-21.722990.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-03T13-22-21.722990.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-03T13-22-21.722990.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-10-03T13-22-21.722990.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-03T13-22-21.722990.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-03T13-22-21.722990.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-03T13-22-21.722990.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-03T13-22-21.722990.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-10-03T13-22-21.722990.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-03T13-22-21.722990.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-03T13-22-21.722990.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-03T13-22-21.722990.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-03T13-22-21.722990.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-10-03T13-22-21.722990.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-10-03T13-22-21.722990.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-10-03T13-22-21.722990.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-10-03T13-22-21.722990.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-03T13-22-21.722990.parquet", 
"**/details_harness|hendrycksTest-machine_learning|5_2023-10-03T13-22-21.722990.parquet", "**/details_harness|hendrycksTest-management|5_2023-10-03T13-22-21.722990.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-10-03T13-22-21.722990.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-10-03T13-22-21.722990.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-10-03T13-22-21.722990.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-10-03T13-22-21.722990.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-03T13-22-21.722990.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-10-03T13-22-21.722990.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-10-03T13-22-21.722990.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-10-03T13-22-21.722990.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-10-03T13-22-21.722990.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-10-03T13-22-21.722990.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-10-03T13-22-21.722990.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-10-03T13-22-21.722990.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-10-03T13-22-21.722990.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-10-03T13-22-21.722990.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-10-03T13-22-21.722990.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-03T13-22-21.722990.parquet", "**/details_harness|hendrycksTest-virology|5_2023-10-03T13-22-21.722990.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-10-03T13-22-21.722990.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-03T13-22-21.722990.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-10-03T13-22-21.722990.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-10-03T13-22-21.722990.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-10-03T13-22-21.722990.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-03T13-22-21.722990.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-10-03T13-22-21.722990.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-10-03T13-22-21.722990.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-10-03T13-22-21.722990.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-10-03T13-22-21.722990.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-10-03T13-22-21.722990.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-10-03T13-22-21.722990.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-10-03T13-22-21.722990.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-03T13-22-21.722990.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-10-03T13-22-21.722990.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-03T13-22-21.722990.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-03T13-22-21.722990.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-10-03T13-22-21.722990.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-10-03T13-22-21.722990.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-10-03T13-22-21.722990.parquet", 
"**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-03T13-22-21.722990.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-03T13-22-21.722990.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-03T13-22-21.722990.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-10-03T13-22-21.722990.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-03T13-22-21.722990.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-03T13-22-21.722990.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-03T13-22-21.722990.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-03T13-22-21.722990.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-10-03T13-22-21.722990.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-03T13-22-21.722990.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-03T13-22-21.722990.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-03T13-22-21.722990.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-03T13-22-21.722990.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-10-03T13-22-21.722990.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-10-03T13-22-21.722990.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-10-03T13-22-21.722990.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-10-03T13-22-21.722990.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-03T13-22-21.722990.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-10-03T13-22-21.722990.parquet", "**/details_harness|hendrycksTest-management|5_2023-10-03T13-22-21.722990.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-10-03T13-22-21.722990.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-10-03T13-22-21.722990.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-10-03T13-22-21.722990.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-10-03T13-22-21.722990.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-03T13-22-21.722990.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-10-03T13-22-21.722990.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-10-03T13-22-21.722990.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-10-03T13-22-21.722990.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-10-03T13-22-21.722990.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-10-03T13-22-21.722990.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-10-03T13-22-21.722990.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-10-03T13-22-21.722990.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-10-03T13-22-21.722990.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-10-03T13-22-21.722990.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-10-03T13-22-21.722990.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-03T13-22-21.722990.parquet", "**/details_harness|hendrycksTest-virology|5_2023-10-03T13-22-21.722990.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-10-03T13-22-21.722990.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", 
"data_files": [{"split": "2023_10_03T13_22_21.722990", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-03T13-22-21.722990.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-03T13-22-21.722990.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_10_03T13_22_21.722990", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-10-03T13-22-21.722990.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-10-03T13-22-21.722990.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_10_03T13_22_21.722990", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-10-03T13-22-21.722990.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-10-03T13-22-21.722990.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_10_03T13_22_21.722990", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-10-03T13-22-21.722990.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-10-03T13-22-21.722990.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_10_03T13_22_21.722990", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-03T13-22-21.722990.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-03T13-22-21.722990.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_10_03T13_22_21.722990", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-10-03T13-22-21.722990.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-10-03T13-22-21.722990.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_10_03T13_22_21.722990", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-10-03T13-22-21.722990.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-10-03T13-22-21.722990.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_10_03T13_22_21.722990", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-10-03T13-22-21.722990.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-10-03T13-22-21.722990.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_10_03T13_22_21.722990", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-10-03T13-22-21.722990.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-10-03T13-22-21.722990.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_10_03T13_22_21.722990", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-10-03T13-22-21.722990.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-10-03T13-22-21.722990.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_10_03T13_22_21.722990", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-10-03T13-22-21.722990.parquet"]}, {"split": "latest", 
"path": ["**/details_harness|hendrycksTest-college_physics|5_2023-10-03T13-22-21.722990.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_10_03T13_22_21.722990", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-10-03T13-22-21.722990.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-10-03T13-22-21.722990.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_10_03T13_22_21.722990", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-03T13-22-21.722990.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-03T13-22-21.722990.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_10_03T13_22_21.722990", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-10-03T13-22-21.722990.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-10-03T13-22-21.722990.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_10_03T13_22_21.722990", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-03T13-22-21.722990.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-03T13-22-21.722990.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_10_03T13_22_21.722990", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-03T13-22-21.722990.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-03T13-22-21.722990.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_10_03T13_22_21.722990", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-10-03T13-22-21.722990.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-10-03T13-22-21.722990.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_10_03T13_22_21.722990", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-10-03T13-22-21.722990.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-10-03T13-22-21.722990.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_10_03T13_22_21.722990", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-10-03T13-22-21.722990.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-10-03T13-22-21.722990.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_10_03T13_22_21.722990", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-03T13-22-21.722990.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-03T13-22-21.722990.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_10_03T13_22_21.722990", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-03T13-22-21.722990.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-03T13-22-21.722990.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_10_03T13_22_21.722990", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-03T13-22-21.722990.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-03T13-22-21.722990.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_10_03T13_22_21.722990", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-10-03T13-22-21.722990.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-10-03T13-22-21.722990.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_10_03T13_22_21.722990", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-03T13-22-21.722990.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-03T13-22-21.722990.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_10_03T13_22_21.722990", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-03T13-22-21.722990.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-03T13-22-21.722990.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_10_03T13_22_21.722990", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-03T13-22-21.722990.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-03T13-22-21.722990.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_10_03T13_22_21.722990", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-03T13-22-21.722990.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-03T13-22-21.722990.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_10_03T13_22_21.722990", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-10-03T13-22-21.722990.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-10-03T13-22-21.722990.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_10_03T13_22_21.722990", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-03T13-22-21.722990.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-03T13-22-21.722990.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_10_03T13_22_21.722990", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-03T13-22-21.722990.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-03T13-22-21.722990.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_10_03T13_22_21.722990", "path": 
["**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-03T13-22-21.722990.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-03T13-22-21.722990.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_10_03T13_22_21.722990", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-03T13-22-21.722990.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-03T13-22-21.722990.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_10_03T13_22_21.722990", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-10-03T13-22-21.722990.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-10-03T13-22-21.722990.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_10_03T13_22_21.722990", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-10-03T13-22-21.722990.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-10-03T13-22-21.722990.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_10_03T13_22_21.722990", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-10-03T13-22-21.722990.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-10-03T13-22-21.722990.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_10_03T13_22_21.722990", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-10-03T13-22-21.722990.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-10-03T13-22-21.722990.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_10_03T13_22_21.722990", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-03T13-22-21.722990.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-03T13-22-21.722990.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_10_03T13_22_21.722990", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-10-03T13-22-21.722990.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-10-03T13-22-21.722990.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_10_03T13_22_21.722990", "path": ["**/details_harness|hendrycksTest-management|5_2023-10-03T13-22-21.722990.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-10-03T13-22-21.722990.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_10_03T13_22_21.722990", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-10-03T13-22-21.722990.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-10-03T13-22-21.722990.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_10_03T13_22_21.722990", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-10-03T13-22-21.722990.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-medical_genetics|5_2023-10-03T13-22-21.722990.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_10_03T13_22_21.722990", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-10-03T13-22-21.722990.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-10-03T13-22-21.722990.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_10_03T13_22_21.722990", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-10-03T13-22-21.722990.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-10-03T13-22-21.722990.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_10_03T13_22_21.722990", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-03T13-22-21.722990.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-03T13-22-21.722990.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_10_03T13_22_21.722990", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-10-03T13-22-21.722990.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-10-03T13-22-21.722990.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_10_03T13_22_21.722990", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-10-03T13-22-21.722990.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-10-03T13-22-21.722990.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_10_03T13_22_21.722990", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-10-03T13-22-21.722990.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-10-03T13-22-21.722990.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_10_03T13_22_21.722990", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-10-03T13-22-21.722990.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-10-03T13-22-21.722990.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_10_03T13_22_21.722990", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-10-03T13-22-21.722990.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-10-03T13-22-21.722990.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_10_03T13_22_21.722990", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-10-03T13-22-21.722990.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-10-03T13-22-21.722990.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_10_03T13_22_21.722990", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-10-03T13-22-21.722990.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-10-03T13-22-21.722990.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": 
"2023_10_03T13_22_21.722990", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-10-03T13-22-21.722990.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-10-03T13-22-21.722990.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_10_03T13_22_21.722990", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-10-03T13-22-21.722990.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-10-03T13-22-21.722990.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_10_03T13_22_21.722990", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-10-03T13-22-21.722990.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-10-03T13-22-21.722990.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_10_03T13_22_21.722990", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-03T13-22-21.722990.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-03T13-22-21.722990.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_10_03T13_22_21.722990", "path": ["**/details_harness|hendrycksTest-virology|5_2023-10-03T13-22-21.722990.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-10-03T13-22-21.722990.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_10_03T13_22_21.722990", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-10-03T13-22-21.722990.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-10-03T13-22-21.722990.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_10_03T13_22_21.722990", "path": ["**/details_harness|truthfulqa:mc|0_2023-10-03T13-22-21.722990.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-10-03T13-22-21.722990.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_10_24T09_03_51.727518", "path": ["**/details_harness|winogrande|5_2023-10-24T09-03-51.727518.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-10-24T09-03-51.727518.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_10_03T13_22_21.722990", "path": ["results_2023-10-03T13-22-21.722990.parquet"]}, {"split": "2023_10_24T09_03_51.727518", "path": ["results_2023-10-24T09-03-51.727518.parquet"]}, {"split": "latest", "path": ["results_2023-10-24T09-03-51.727518.parquet"]}]}]}
2023-10-24T08:04:04+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of CobraMamba/mamba-gpt-7b ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model CobraMamba/mamba-gpt-7b on the Open LLM Leaderboard. The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-10-24T09:03:51.727518 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks; you can find each in the results and the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
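A minimal sketch of the load call the summary above refers to, assuming the details repository is named `open-llm-leaderboard/details_CobraMamba__mamba-gpt-7b` (following the `details_{org}__{model}` naming used for these evaluation datasets) and that `harness_winogrande_5` is one of its configurations:

```python
from datasets import load_dataset

# Hypothetical repo/config names following the details_{org}__{model} convention;
# swap in whichever task configuration you want to inspect.
data = load_dataset(
    "open-llm-leaderboard/details_CobraMamba__mamba-gpt-7b",
    "harness_winogrande_5",
    split="train",
)
```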
[ "# Dataset Card for Evaluation run of CobraMamba/mamba-gpt-7b", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model CobraMamba/mamba-gpt-7b on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-24T09:03:51.727518(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of CobraMamba/mamba-gpt-7b", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model CobraMamba/mamba-gpt-7b on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-24T09:03:51.727518(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 21, 31, 169, 67, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of CobraMamba/mamba-gpt-7b## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model CobraMamba/mamba-gpt-7b on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-10-24T09:03:51.727518(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
ff1282ef232cac2ee57b5fe231ccd930862ad7d0
# Dataset Card for "3d_object_drawing" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
Falah/3d_object_drawing
[ "region:us" ]
2023-10-03T12:29:00+00:00
{"dataset_info": {"features": [{"name": "prompts", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 181730, "num_examples": 1000}], "download_size": 4528, "dataset_size": 181730}}
2023-10-03T12:29:02+00:00
[]
[]
TAGS #region-us
# Dataset Card for "3d_object_drawing" More Information needed
[ "# Dataset Card for \"3d_object_drawing\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"3d_object_drawing\"\n\nMore Information needed" ]
[ 6, 17 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"3d_object_drawing\"\n\nMore Information needed" ]
8c7ac03ee373e19b083f5c8016c7a836c24f9836
# Dataset Card for "embeddings_from_distilbert_class_heaps_and_eval1perc_2" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
johannes-garstenauer/embeddings_from_distilbert_class_heaps_and_eval1perc_2
[ "region:us" ]
2023-10-03T12:34:12+00:00
{"dataset_info": {"features": [{"name": "struct", "dtype": "string"}, {"name": "label", "dtype": "int64"}, {"name": "pred", "dtype": "int64"}, {"name": "cls_layer_6", "sequence": "float32"}, {"name": "cls_layer_5", "sequence": "float32"}, {"name": "cls_layer_4", "sequence": "float32"}], "splits": [{"name": "train", "num_bytes": 25659437, "num_examples": 2691}], "download_size": 30374962, "dataset_size": 25659437}}
2023-10-03T12:34:22+00:00
[]
[]
TAGS #region-us
# Dataset Card for "embeddings_from_distilbert_class_heaps_and_eval1perc_2" More Information needed
[ "# Dataset Card for \"embeddings_from_distilbert_class_heaps_and_eval1perc_2\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"embeddings_from_distilbert_class_heaps_and_eval1perc_2\"\n\nMore Information needed" ]
[ 6, 34 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"embeddings_from_distilbert_class_heaps_and_eval1perc_2\"\n\nMore Information needed" ]
a1bc9aec2bb4acd86abc07e1afe52064f4f4d114
# Dataset Card for "IP2P-hiddenwm-200" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
FelixdoingAI/IP2P-hiddenwm-200
[ "region:us" ]
2023-10-03T12:44:07+00:00
{"dataset_info": {"features": [{"name": "original_prompt", "dtype": "string"}, {"name": "original_image", "dtype": "image"}, {"name": "edit_prompt", "dtype": "string"}, {"name": "edited_prompt", "dtype": "string"}, {"name": "edited_image", "dtype": "image"}, {"name": "adversarial_image", "dtype": "image"}], "splits": [{"name": "train", "num_bytes": 104484241.0, "num_examples": 200}], "download_size": 104481659, "dataset_size": 104484241.0}}
2023-10-03T13:09:13+00:00
[]
[]
TAGS #region-us
# Dataset Card for "IP2P-hiddenwm-200" More Information needed
[ "# Dataset Card for \"IP2P-hiddenwm-200\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"IP2P-hiddenwm-200\"\n\nMore Information needed" ]
[ 6, 18 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"IP2P-hiddenwm-200\"\n\nMore Information needed" ]
5062f7124b2e7b2b276635760fc5b4cc074a9218
# Dataset Card for Evaluation run of Xilabs/calypso-3b-alpha-v2 ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/Xilabs/calypso-3b-alpha-v2 - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [Xilabs/calypso-3b-alpha-v2](https://huggingface.co/Xilabs/calypso-3b-alpha-v2) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_Xilabs__calypso-3b-alpha-v2", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-10-24T19:29:31.085880](https://huggingface.co/datasets/open-llm-leaderboard/details_Xilabs__calypso-3b-alpha-v2/blob/main/results_2023-10-24T19-29-31.085880.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks; you can find each in the results and the "latest" split for each eval): ```python { "all": { "em": 0.17051174496644295, "em_stderr": 0.0038514292227271175, "f1": 0.2208368288590607, "f1_stderr": 0.0039617614020142555, "acc": 0.3297731593317987, "acc_stderr": 0.007824223176002862 }, "harness|drop|3": { "em": 0.17051174496644295, "em_stderr": 0.0038514292227271175, "f1": 0.2208368288590607, "f1_stderr": 0.0039617614020142555 }, "harness|gsm8k|5": { "acc": 0.006823351023502654, "acc_stderr": 0.0022675371022544896 }, "harness|winogrande|5": { "acc": 0.6527229676400947, "acc_stderr": 0.013380909249751233 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
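The card above also mentions an aggregated "results" configuration; a minimal sketch of reading the latest aggregated metrics from it (the config and split names come from the card and the repository metadata, not from any code shown in the card):

```python
from datasets import load_dataset

# "results" aggregates the per-run metrics; the "latest" split points at the
# newest run (2023-10-24T19:29:31.085880 at the time of writing).
results = load_dataset(
    "open-llm-leaderboard/details_Xilabs__calypso-3b-alpha-v2",
    "results",
    split="latest",
)
print(results[0])
```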
open-llm-leaderboard/details_Xilabs__calypso-3b-alpha-v2
[ "region:us" ]
2023-10-03T13:02:03+00:00
{"pretty_name": "Evaluation run of Xilabs/calypso-3b-alpha-v2", "dataset_summary": "Dataset automatically created during the evaluation run of model [Xilabs/calypso-3b-alpha-v2](https://huggingface.co/Xilabs/calypso-3b-alpha-v2) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_Xilabs__calypso-3b-alpha-v2\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-10-24T19:29:31.085880](https://huggingface.co/datasets/open-llm-leaderboard/details_Xilabs__calypso-3b-alpha-v2/blob/main/results_2023-10-24T19-29-31.085880.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.17051174496644295,\n \"em_stderr\": 0.0038514292227271175,\n \"f1\": 0.2208368288590607,\n \"f1_stderr\": 0.0039617614020142555,\n \"acc\": 0.3297731593317987,\n \"acc_stderr\": 0.007824223176002862\n },\n \"harness|drop|3\": {\n \"em\": 0.17051174496644295,\n \"em_stderr\": 0.0038514292227271175,\n \"f1\": 0.2208368288590607,\n \"f1_stderr\": 0.0039617614020142555\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.006823351023502654,\n \"acc_stderr\": 0.0022675371022544896\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.6527229676400947,\n \"acc_stderr\": 0.013380909249751233\n }\n}\n```", "repo_url": "https://huggingface.co/Xilabs/calypso-3b-alpha-v2", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_10_03T14_01_45.504923", "path": ["**/details_harness|arc:challenge|25_2023-10-03T14-01-45.504923.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-10-03T14-01-45.504923.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_10_24T19_29_31.085880", "path": ["**/details_harness|drop|3_2023-10-24T19-29-31.085880.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-10-24T19-29-31.085880.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_10_24T19_29_31.085880", "path": ["**/details_harness|gsm8k|5_2023-10-24T19-29-31.085880.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-10-24T19-29-31.085880.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_10_03T14_01_45.504923", "path": ["**/details_harness|hellaswag|10_2023-10-03T14-01-45.504923.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-10-03T14-01-45.504923.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": 
[{"split": "2023_10_03T14_01_45.504923", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-03T14-01-45.504923.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-10-03T14-01-45.504923.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-10-03T14-01-45.504923.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-10-03T14-01-45.504923.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-03T14-01-45.504923.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-10-03T14-01-45.504923.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-10-03T14-01-45.504923.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-10-03T14-01-45.504923.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-10-03T14-01-45.504923.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-10-03T14-01-45.504923.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-10-03T14-01-45.504923.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-10-03T14-01-45.504923.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-03T14-01-45.504923.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-10-03T14-01-45.504923.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-03T14-01-45.504923.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-03T14-01-45.504923.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-10-03T14-01-45.504923.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-10-03T14-01-45.504923.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-10-03T14-01-45.504923.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-03T14-01-45.504923.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-03T14-01-45.504923.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-03T14-01-45.504923.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-10-03T14-01-45.504923.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-03T14-01-45.504923.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-03T14-01-45.504923.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-03T14-01-45.504923.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-03T14-01-45.504923.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-10-03T14-01-45.504923.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-03T14-01-45.504923.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-03T14-01-45.504923.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-03T14-01-45.504923.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-03T14-01-45.504923.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-10-03T14-01-45.504923.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-10-03T14-01-45.504923.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-10-03T14-01-45.504923.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-10-03T14-01-45.504923.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-03T14-01-45.504923.parquet", 
"**/details_harness|hendrycksTest-machine_learning|5_2023-10-03T14-01-45.504923.parquet", "**/details_harness|hendrycksTest-management|5_2023-10-03T14-01-45.504923.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-10-03T14-01-45.504923.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-10-03T14-01-45.504923.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-10-03T14-01-45.504923.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-10-03T14-01-45.504923.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-03T14-01-45.504923.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-10-03T14-01-45.504923.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-10-03T14-01-45.504923.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-10-03T14-01-45.504923.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-10-03T14-01-45.504923.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-10-03T14-01-45.504923.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-10-03T14-01-45.504923.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-10-03T14-01-45.504923.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-10-03T14-01-45.504923.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-10-03T14-01-45.504923.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-10-03T14-01-45.504923.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-03T14-01-45.504923.parquet", "**/details_harness|hendrycksTest-virology|5_2023-10-03T14-01-45.504923.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-10-03T14-01-45.504923.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-03T14-01-45.504923.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-10-03T14-01-45.504923.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-10-03T14-01-45.504923.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-10-03T14-01-45.504923.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-03T14-01-45.504923.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-10-03T14-01-45.504923.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-10-03T14-01-45.504923.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-10-03T14-01-45.504923.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-10-03T14-01-45.504923.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-10-03T14-01-45.504923.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-10-03T14-01-45.504923.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-10-03T14-01-45.504923.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-03T14-01-45.504923.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-10-03T14-01-45.504923.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-03T14-01-45.504923.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-03T14-01-45.504923.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-10-03T14-01-45.504923.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-10-03T14-01-45.504923.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-10-03T14-01-45.504923.parquet", 
"**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-03T14-01-45.504923.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-03T14-01-45.504923.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-03T14-01-45.504923.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-10-03T14-01-45.504923.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-03T14-01-45.504923.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-03T14-01-45.504923.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-03T14-01-45.504923.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-03T14-01-45.504923.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-10-03T14-01-45.504923.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-03T14-01-45.504923.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-03T14-01-45.504923.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-03T14-01-45.504923.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-03T14-01-45.504923.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-10-03T14-01-45.504923.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-10-03T14-01-45.504923.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-10-03T14-01-45.504923.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-10-03T14-01-45.504923.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-03T14-01-45.504923.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-10-03T14-01-45.504923.parquet", "**/details_harness|hendrycksTest-management|5_2023-10-03T14-01-45.504923.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-10-03T14-01-45.504923.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-10-03T14-01-45.504923.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-10-03T14-01-45.504923.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-10-03T14-01-45.504923.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-03T14-01-45.504923.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-10-03T14-01-45.504923.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-10-03T14-01-45.504923.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-10-03T14-01-45.504923.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-10-03T14-01-45.504923.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-10-03T14-01-45.504923.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-10-03T14-01-45.504923.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-10-03T14-01-45.504923.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-10-03T14-01-45.504923.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-10-03T14-01-45.504923.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-10-03T14-01-45.504923.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-03T14-01-45.504923.parquet", "**/details_harness|hendrycksTest-virology|5_2023-10-03T14-01-45.504923.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-10-03T14-01-45.504923.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", 
"data_files": [{"split": "2023_10_03T14_01_45.504923", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-03T14-01-45.504923.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-03T14-01-45.504923.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_10_03T14_01_45.504923", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-10-03T14-01-45.504923.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-10-03T14-01-45.504923.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_10_03T14_01_45.504923", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-10-03T14-01-45.504923.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-10-03T14-01-45.504923.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_10_03T14_01_45.504923", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-10-03T14-01-45.504923.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-10-03T14-01-45.504923.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_10_03T14_01_45.504923", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-03T14-01-45.504923.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-03T14-01-45.504923.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_10_03T14_01_45.504923", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-10-03T14-01-45.504923.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-10-03T14-01-45.504923.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_10_03T14_01_45.504923", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-10-03T14-01-45.504923.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-10-03T14-01-45.504923.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_10_03T14_01_45.504923", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-10-03T14-01-45.504923.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-10-03T14-01-45.504923.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_10_03T14_01_45.504923", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-10-03T14-01-45.504923.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-10-03T14-01-45.504923.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_10_03T14_01_45.504923", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-10-03T14-01-45.504923.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-10-03T14-01-45.504923.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_10_03T14_01_45.504923", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-10-03T14-01-45.504923.parquet"]}, {"split": "latest", 
"path": ["**/details_harness|hendrycksTest-college_physics|5_2023-10-03T14-01-45.504923.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_10_03T14_01_45.504923", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-10-03T14-01-45.504923.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-10-03T14-01-45.504923.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_10_03T14_01_45.504923", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-03T14-01-45.504923.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-03T14-01-45.504923.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_10_03T14_01_45.504923", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-10-03T14-01-45.504923.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-10-03T14-01-45.504923.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_10_03T14_01_45.504923", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-03T14-01-45.504923.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-03T14-01-45.504923.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_10_03T14_01_45.504923", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-03T14-01-45.504923.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-03T14-01-45.504923.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_10_03T14_01_45.504923", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-10-03T14-01-45.504923.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-10-03T14-01-45.504923.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_10_03T14_01_45.504923", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-10-03T14-01-45.504923.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-10-03T14-01-45.504923.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_10_03T14_01_45.504923", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-10-03T14-01-45.504923.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-10-03T14-01-45.504923.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_10_03T14_01_45.504923", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-03T14-01-45.504923.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-03T14-01-45.504923.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_10_03T14_01_45.504923", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-03T14-01-45.504923.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-03T14-01-45.504923.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_10_03T14_01_45.504923", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-03T14-01-45.504923.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-03T14-01-45.504923.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_10_03T14_01_45.504923", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-10-03T14-01-45.504923.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-10-03T14-01-45.504923.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_10_03T14_01_45.504923", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-03T14-01-45.504923.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-03T14-01-45.504923.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_10_03T14_01_45.504923", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-03T14-01-45.504923.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-03T14-01-45.504923.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_10_03T14_01_45.504923", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-03T14-01-45.504923.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-03T14-01-45.504923.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_10_03T14_01_45.504923", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-03T14-01-45.504923.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-03T14-01-45.504923.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_10_03T14_01_45.504923", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-10-03T14-01-45.504923.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-10-03T14-01-45.504923.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_10_03T14_01_45.504923", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-03T14-01-45.504923.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-03T14-01-45.504923.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_10_03T14_01_45.504923", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-03T14-01-45.504923.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-03T14-01-45.504923.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_10_03T14_01_45.504923", "path": 
["**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-03T14-01-45.504923.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-03T14-01-45.504923.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_10_03T14_01_45.504923", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-03T14-01-45.504923.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-03T14-01-45.504923.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_10_03T14_01_45.504923", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-10-03T14-01-45.504923.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-10-03T14-01-45.504923.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_10_03T14_01_45.504923", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-10-03T14-01-45.504923.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-10-03T14-01-45.504923.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_10_03T14_01_45.504923", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-10-03T14-01-45.504923.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-10-03T14-01-45.504923.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_10_03T14_01_45.504923", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-10-03T14-01-45.504923.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-10-03T14-01-45.504923.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_10_03T14_01_45.504923", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-03T14-01-45.504923.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-03T14-01-45.504923.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_10_03T14_01_45.504923", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-10-03T14-01-45.504923.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-10-03T14-01-45.504923.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_10_03T14_01_45.504923", "path": ["**/details_harness|hendrycksTest-management|5_2023-10-03T14-01-45.504923.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-10-03T14-01-45.504923.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_10_03T14_01_45.504923", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-10-03T14-01-45.504923.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-10-03T14-01-45.504923.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_10_03T14_01_45.504923", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-10-03T14-01-45.504923.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-medical_genetics|5_2023-10-03T14-01-45.504923.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_10_03T14_01_45.504923", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-10-03T14-01-45.504923.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-10-03T14-01-45.504923.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_10_03T14_01_45.504923", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-10-03T14-01-45.504923.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-10-03T14-01-45.504923.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_10_03T14_01_45.504923", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-03T14-01-45.504923.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-03T14-01-45.504923.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_10_03T14_01_45.504923", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-10-03T14-01-45.504923.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-10-03T14-01-45.504923.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_10_03T14_01_45.504923", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-10-03T14-01-45.504923.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-10-03T14-01-45.504923.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_10_03T14_01_45.504923", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-10-03T14-01-45.504923.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-10-03T14-01-45.504923.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_10_03T14_01_45.504923", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-10-03T14-01-45.504923.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-10-03T14-01-45.504923.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_10_03T14_01_45.504923", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-10-03T14-01-45.504923.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-10-03T14-01-45.504923.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_10_03T14_01_45.504923", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-10-03T14-01-45.504923.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-10-03T14-01-45.504923.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_10_03T14_01_45.504923", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-10-03T14-01-45.504923.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-10-03T14-01-45.504923.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": 
"2023_10_03T14_01_45.504923", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-10-03T14-01-45.504923.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-10-03T14-01-45.504923.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_10_03T14_01_45.504923", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-10-03T14-01-45.504923.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-10-03T14-01-45.504923.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_10_03T14_01_45.504923", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-10-03T14-01-45.504923.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-10-03T14-01-45.504923.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_10_03T14_01_45.504923", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-03T14-01-45.504923.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-03T14-01-45.504923.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_10_03T14_01_45.504923", "path": ["**/details_harness|hendrycksTest-virology|5_2023-10-03T14-01-45.504923.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-10-03T14-01-45.504923.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_10_03T14_01_45.504923", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-10-03T14-01-45.504923.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-10-03T14-01-45.504923.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_10_03T14_01_45.504923", "path": ["**/details_harness|truthfulqa:mc|0_2023-10-03T14-01-45.504923.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-10-03T14-01-45.504923.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_10_24T19_29_31.085880", "path": ["**/details_harness|winogrande|5_2023-10-24T19-29-31.085880.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-10-24T19-29-31.085880.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_10_03T14_01_45.504923", "path": ["results_2023-10-03T14-01-45.504923.parquet"]}, {"split": "2023_10_24T19_29_31.085880", "path": ["results_2023-10-24T19-29-31.085880.parquet"]}, {"split": "latest", "path": ["results_2023-10-24T19-29-31.085880.parquet"]}]}]}
2023-10-24T18:29:43+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of Xilabs/calypso-3b-alpha-v2 ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model Xilabs/calypso-3b-alpha-v2 on the Open LLM Leaderboard. The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-10-24T19:29:31.085880 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each one in the results and the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
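A minimal loading sketch for this details repository (the repository id `open-llm-leaderboard/details_Xilabs__calypso-3b-alpha-v2` is an assumption inferred from the card title and the leaderboard's usual `details_<org>__<model>` naming; the `latest` split name is taken from the configs above):

```python
from datasets import load_dataset

# Hypothetical repository id, following the Open LLM Leaderboard convention
# open-llm-leaderboard/details_<org>__<model> for Xilabs/calypso-3b-alpha-v2.
data = load_dataset(
    "open-llm-leaderboard/details_Xilabs__calypso-3b-alpha-v2",
    "harness_winogrande_5",  # one of the 64 evaluated-task configurations
    split="latest",          # always points at the most recent run
)
print(data)
```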
[ "# Dataset Card for Evaluation run of Xilabs/calypso-3b-alpha-v2", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model Xilabs/calypso-3b-alpha-v2 on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-24T19:29:31.085880(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of Xilabs/calypso-3b-alpha-v2", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model Xilabs/calypso-3b-alpha-v2 on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-24T19:29:31.085880(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 24, 31, 172, 67, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of Xilabs/calypso-3b-alpha-v2## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model Xilabs/calypso-3b-alpha-v2 on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-10-24T19:29:31.085880(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
96f3de722898330f2ff066f7b3f43f6bbb6c1d8f
An astronomy encyclopedia (in Chinese) comprising 8 subdirectories, with roughly 1,000 entries and about 1,100,000 characters in total. Each record contains a top-level category, a sub-category, a title, and the content. Note that **the content has been flattened to a single line**, and **the texts are generally long**. A sample record looks like this:

```json
{
    "top_category": "天文学",
    "sub_category": "天体力学",
    "title": "万有引力定律",
    "content": "万有引力定律(汉语拼音:wàn yǒu yǐn lì zhī dìng lǜ),(universal gravitation,law of),自然界中任何两个质点都相互吸引,这个力同两个质点的质量的乘积成正比,同它们之间的距离的二次方成反比。如用m1、m2表示两质点的质量,r表示两质点间的距离,F表示作用力的值,则F=Gm1m2/r2,式中的G是比例常量,称万有引力常量或牛顿引力常量,数值因不同单位制而异,在国际单位制中G为6.672×1011牛顿·米2/千克2。这个定律由牛顿于1687年在《原理》上首次发表,它和牛顿运动定律一起,构成了牛顿力学特别是天体力学的基础。\n  在牛顿公布该定律之前,胡克、惠更斯都曾根据开普勒定律推测行星和太阳间存在和距离二次方成反比的引力,但未能提出数学证明,为此胡克还和牛顿通过信,因此对定律的首创权有过争议。牛顿还曾对晚年的忘年交斯多克雷说过,1666年他在家乡避瘟疫时,曾因见苹果从树上落地而想到地球对苹果的引力是否可延伸到月球。此说传布很广,许多科学家深信不疑,并对牛顿为何推迟20年才发表有种种推测。但也有人根据牛顿晚年的精神状态,认为他对斯多克雷所说的并非真情。\n  一般物体之间的引力,在物体尺度远小于质心距离时,可视为质点;尺度和间距相近时,须视为质点系,用积分法求引力。但牛顿已算出一个密度均匀的圆球对附近质点的引力同把圆球的质量集中于球心时完全一致。对万有引力的起因,牛顿未作解释,把它视为超距力或以太的作用,系后人所为。爱因斯坦在广义相对论中将引力归之于时空曲率的变化。"
}
```
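A minimal sketch for loading the data with the `datasets` library (the `train` split name is an assumption; the card does not list splits explicitly):

```python
from datasets import load_dataset

# Field names follow the sample record above.
ds = load_dataset("Mxode/Baike-Astronomy-ZH", split="train")
example = ds[0]
print(example["top_category"], example["sub_category"], example["title"])
print(example["content"][:100])  # contents are single-line and generally long
```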
Mxode/Baike-Astronomy-ZH
[ "task_categories:text-generation", "size_categories:n<1K", "language:zh", "license:apache-2.0", "astronomy", "region:us" ]
2023-10-03T13:08:08+00:00
{"language": ["zh"], "license": "apache-2.0", "size_categories": ["n<1K"], "task_categories": ["text-generation"], "tags": ["astronomy"]}
2023-10-03T13:19:38+00:00
[]
[ "zh" ]
TAGS #task_categories-text-generation #size_categories-n<1K #language-Chinese #license-apache-2.0 #astronomy #region-us
An astronomy encyclopedia comprising 8 subdirectories, with roughly 1,000 entries and about 1,100,000 characters. Each record contains a top-level category, a sub-category, a title, and the content. The content has been processed into a single line, and the texts are generally long. A sample is shown below:
[]
[ "TAGS\n#task_categories-text-generation #size_categories-n<1K #language-Chinese #license-apache-2.0 #astronomy #region-us \n" ]
[ 44 ]
[ "passage: TAGS\n#task_categories-text-generation #size_categories-n<1K #language-Chinese #license-apache-2.0 #astronomy #region-us \n" ]
dc0ea7f41fe1034e93170440510464b883844167
# Dataset Card for Evaluation run of winglian/basilisk-4b

## Dataset Description

- **Homepage:** 
- **Repository:** https://huggingface.co/winglian/basilisk-4b
- **Paper:** 
- **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
- **Point of Contact:** [email protected]

### Dataset Summary

Dataset automatically created during the evaluation run of model [winglian/basilisk-4b](https://huggingface.co/winglian/basilisk-4b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).

The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).

To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_winglian__basilisk-4b",
	"harness_winogrande_5",
	split="train")
```

## Latest results

These are the [latest results from run 2023-10-24T05:37:13.853267](https://huggingface.co/datasets/open-llm-leaderboard/details_winglian__basilisk-4b/blob/main/results_2023-10-24T05-37-13.853267.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each one in the results and the "latest" split for each eval):

```python
{
    "all": {
        "em": 0.005453020134228188,
        "em_stderr": 0.000754172779679255,
        "f1": 0.03888842281879202,
        "f1_stderr": 0.001263593140660679,
        "acc": 0.26558800315706393,
        "acc_stderr": 0.007012571320319758
    },
    "harness|drop|3": {
        "em": 0.005453020134228188,
        "em_stderr": 0.000754172779679255,
        "f1": 0.03888842281879202,
        "f1_stderr": 0.001263593140660679
    },
    "harness|gsm8k|5": {
        "acc": 0.0,
        "acc_stderr": 0.0
    },
    "harness|winogrande|5": {
        "acc": 0.5311760063141279,
        "acc_stderr": 0.014025142640639516
    }
}
```

### Supported Tasks and Leaderboards

[More Information Needed]

### Languages

[More Information Needed]

## Dataset Structure

### Data Instances

[More Information Needed]

### Data Fields

[More Information Needed]

### Data Splits

[More Information Needed]

## Dataset Creation

### Curation Rationale

[More Information Needed]

### Source Data

#### Initial Data Collection and Normalization

[More Information Needed]

#### Who are the source language producers?

[More Information Needed]

### Annotations

#### Annotation process

[More Information Needed]

#### Who are the annotators?

[More Information Needed]

### Personal and Sensitive Information

[More Information Needed]

## Considerations for Using the Data

### Social Impact of Dataset

[More Information Needed]

### Discussion of Biases

[More Information Needed]

### Other Known Limitations

[More Information Needed]

## Additional Information

### Dataset Curators

[More Information Needed]

### Licensing Information

[More Information Needed]

### Citation Information

[More Information Needed]

### Contributions

[More Information Needed]
open-llm-leaderboard/details_winglian__basilisk-4b
[ "region:us" ]
2023-10-03T13:17:12+00:00
{"pretty_name": "Evaluation run of winglian/basilisk-4b", "dataset_summary": "Dataset automatically created during the evaluation run of model [winglian/basilisk-4b](https://huggingface.co/winglian/basilisk-4b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_winglian__basilisk-4b\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-10-24T05:37:13.853267](https://huggingface.co/datasets/open-llm-leaderboard/details_winglian__basilisk-4b/blob/main/results_2023-10-24T05-37-13.853267.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.005453020134228188,\n \"em_stderr\": 0.000754172779679255,\n \"f1\": 0.03888842281879202,\n \"f1_stderr\": 0.001263593140660679,\n \"acc\": 0.26558800315706393,\n \"acc_stderr\": 0.007012571320319758\n },\n \"harness|drop|3\": {\n \"em\": 0.005453020134228188,\n \"em_stderr\": 0.000754172779679255,\n \"f1\": 0.03888842281879202,\n \"f1_stderr\": 0.001263593140660679\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.0,\n \"acc_stderr\": 0.0\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.5311760063141279,\n \"acc_stderr\": 0.014025142640639516\n }\n}\n```", "repo_url": "https://huggingface.co/winglian/basilisk-4b", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_10_03T14_16_48.676759", "path": ["**/details_harness|arc:challenge|25_2023-10-03T14-16-48.676759.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-10-03T14-16-48.676759.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_10_24T05_37_13.853267", "path": ["**/details_harness|drop|3_2023-10-24T05-37-13.853267.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-10-24T05-37-13.853267.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_10_24T05_37_13.853267", "path": ["**/details_harness|gsm8k|5_2023-10-24T05-37-13.853267.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-10-24T05-37-13.853267.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_10_03T14_16_48.676759", "path": ["**/details_harness|hellaswag|10_2023-10-03T14-16-48.676759.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-10-03T14-16-48.676759.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_10_03T14_16_48.676759", "path": 
["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-03T14-16-48.676759.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-10-03T14-16-48.676759.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-10-03T14-16-48.676759.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-10-03T14-16-48.676759.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-03T14-16-48.676759.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-10-03T14-16-48.676759.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-10-03T14-16-48.676759.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-10-03T14-16-48.676759.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-10-03T14-16-48.676759.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-10-03T14-16-48.676759.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-10-03T14-16-48.676759.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-10-03T14-16-48.676759.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-03T14-16-48.676759.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-10-03T14-16-48.676759.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-03T14-16-48.676759.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-03T14-16-48.676759.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-10-03T14-16-48.676759.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-10-03T14-16-48.676759.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-10-03T14-16-48.676759.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-03T14-16-48.676759.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-03T14-16-48.676759.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-03T14-16-48.676759.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-10-03T14-16-48.676759.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-03T14-16-48.676759.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-03T14-16-48.676759.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-03T14-16-48.676759.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-03T14-16-48.676759.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-10-03T14-16-48.676759.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-03T14-16-48.676759.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-03T14-16-48.676759.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-03T14-16-48.676759.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-03T14-16-48.676759.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-10-03T14-16-48.676759.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-10-03T14-16-48.676759.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-10-03T14-16-48.676759.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-10-03T14-16-48.676759.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-03T14-16-48.676759.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-10-03T14-16-48.676759.parquet", 
"**/details_harness|hendrycksTest-management|5_2023-10-03T14-16-48.676759.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-10-03T14-16-48.676759.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-10-03T14-16-48.676759.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-10-03T14-16-48.676759.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-10-03T14-16-48.676759.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-03T14-16-48.676759.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-10-03T14-16-48.676759.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-10-03T14-16-48.676759.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-10-03T14-16-48.676759.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-10-03T14-16-48.676759.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-10-03T14-16-48.676759.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-10-03T14-16-48.676759.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-10-03T14-16-48.676759.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-10-03T14-16-48.676759.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-10-03T14-16-48.676759.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-10-03T14-16-48.676759.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-03T14-16-48.676759.parquet", "**/details_harness|hendrycksTest-virology|5_2023-10-03T14-16-48.676759.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-10-03T14-16-48.676759.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-03T14-16-48.676759.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-10-03T14-16-48.676759.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-10-03T14-16-48.676759.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-10-03T14-16-48.676759.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-03T14-16-48.676759.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-10-03T14-16-48.676759.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-10-03T14-16-48.676759.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-10-03T14-16-48.676759.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-10-03T14-16-48.676759.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-10-03T14-16-48.676759.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-10-03T14-16-48.676759.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-10-03T14-16-48.676759.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-03T14-16-48.676759.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-10-03T14-16-48.676759.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-03T14-16-48.676759.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-03T14-16-48.676759.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-10-03T14-16-48.676759.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-10-03T14-16-48.676759.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-10-03T14-16-48.676759.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-03T14-16-48.676759.parquet", 
"**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-03T14-16-48.676759.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-03T14-16-48.676759.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-10-03T14-16-48.676759.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-03T14-16-48.676759.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-03T14-16-48.676759.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-03T14-16-48.676759.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-03T14-16-48.676759.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-10-03T14-16-48.676759.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-03T14-16-48.676759.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-03T14-16-48.676759.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-03T14-16-48.676759.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-03T14-16-48.676759.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-10-03T14-16-48.676759.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-10-03T14-16-48.676759.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-10-03T14-16-48.676759.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-10-03T14-16-48.676759.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-03T14-16-48.676759.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-10-03T14-16-48.676759.parquet", "**/details_harness|hendrycksTest-management|5_2023-10-03T14-16-48.676759.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-10-03T14-16-48.676759.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-10-03T14-16-48.676759.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-10-03T14-16-48.676759.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-10-03T14-16-48.676759.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-03T14-16-48.676759.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-10-03T14-16-48.676759.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-10-03T14-16-48.676759.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-10-03T14-16-48.676759.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-10-03T14-16-48.676759.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-10-03T14-16-48.676759.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-10-03T14-16-48.676759.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-10-03T14-16-48.676759.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-10-03T14-16-48.676759.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-10-03T14-16-48.676759.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-10-03T14-16-48.676759.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-03T14-16-48.676759.parquet", "**/details_harness|hendrycksTest-virology|5_2023-10-03T14-16-48.676759.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-10-03T14-16-48.676759.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_10_03T14_16_48.676759", "path": 
["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-03T14-16-48.676759.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-03T14-16-48.676759.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_10_03T14_16_48.676759", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-10-03T14-16-48.676759.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-10-03T14-16-48.676759.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_10_03T14_16_48.676759", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-10-03T14-16-48.676759.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-10-03T14-16-48.676759.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_10_03T14_16_48.676759", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-10-03T14-16-48.676759.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-10-03T14-16-48.676759.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_10_03T14_16_48.676759", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-03T14-16-48.676759.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-03T14-16-48.676759.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_10_03T14_16_48.676759", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-10-03T14-16-48.676759.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-10-03T14-16-48.676759.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_10_03T14_16_48.676759", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-10-03T14-16-48.676759.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-10-03T14-16-48.676759.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_10_03T14_16_48.676759", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-10-03T14-16-48.676759.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-10-03T14-16-48.676759.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_10_03T14_16_48.676759", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-10-03T14-16-48.676759.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-10-03T14-16-48.676759.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_10_03T14_16_48.676759", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-10-03T14-16-48.676759.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-10-03T14-16-48.676759.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_10_03T14_16_48.676759", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-10-03T14-16-48.676759.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-college_physics|5_2023-10-03T14-16-48.676759.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_10_03T14_16_48.676759", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-10-03T14-16-48.676759.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-10-03T14-16-48.676759.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_10_03T14_16_48.676759", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-03T14-16-48.676759.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-03T14-16-48.676759.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_10_03T14_16_48.676759", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-10-03T14-16-48.676759.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-10-03T14-16-48.676759.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_10_03T14_16_48.676759", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-03T14-16-48.676759.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-03T14-16-48.676759.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_10_03T14_16_48.676759", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-03T14-16-48.676759.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-03T14-16-48.676759.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_10_03T14_16_48.676759", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-10-03T14-16-48.676759.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-10-03T14-16-48.676759.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_10_03T14_16_48.676759", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-10-03T14-16-48.676759.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-10-03T14-16-48.676759.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_10_03T14_16_48.676759", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-10-03T14-16-48.676759.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-10-03T14-16-48.676759.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_10_03T14_16_48.676759", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-03T14-16-48.676759.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-03T14-16-48.676759.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_10_03T14_16_48.676759", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-03T14-16-48.676759.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-03T14-16-48.676759.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_10_03T14_16_48.676759", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-03T14-16-48.676759.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-03T14-16-48.676759.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_10_03T14_16_48.676759", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-10-03T14-16-48.676759.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-10-03T14-16-48.676759.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_10_03T14_16_48.676759", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-03T14-16-48.676759.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-03T14-16-48.676759.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_10_03T14_16_48.676759", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-03T14-16-48.676759.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-03T14-16-48.676759.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_10_03T14_16_48.676759", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-03T14-16-48.676759.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-03T14-16-48.676759.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_10_03T14_16_48.676759", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-03T14-16-48.676759.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-03T14-16-48.676759.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_10_03T14_16_48.676759", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-10-03T14-16-48.676759.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-10-03T14-16-48.676759.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_10_03T14_16_48.676759", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-03T14-16-48.676759.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-03T14-16-48.676759.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_10_03T14_16_48.676759", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-03T14-16-48.676759.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-03T14-16-48.676759.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_10_03T14_16_48.676759", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-03T14-16-48.676759.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-03T14-16-48.676759.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_10_03T14_16_48.676759", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-03T14-16-48.676759.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-03T14-16-48.676759.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_10_03T14_16_48.676759", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-10-03T14-16-48.676759.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-10-03T14-16-48.676759.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_10_03T14_16_48.676759", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-10-03T14-16-48.676759.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-10-03T14-16-48.676759.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_10_03T14_16_48.676759", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-10-03T14-16-48.676759.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-10-03T14-16-48.676759.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_10_03T14_16_48.676759", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-10-03T14-16-48.676759.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-10-03T14-16-48.676759.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_10_03T14_16_48.676759", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-03T14-16-48.676759.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-03T14-16-48.676759.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_10_03T14_16_48.676759", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-10-03T14-16-48.676759.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-10-03T14-16-48.676759.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_10_03T14_16_48.676759", "path": ["**/details_harness|hendrycksTest-management|5_2023-10-03T14-16-48.676759.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-10-03T14-16-48.676759.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_10_03T14_16_48.676759", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-10-03T14-16-48.676759.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-10-03T14-16-48.676759.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_10_03T14_16_48.676759", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-10-03T14-16-48.676759.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-10-03T14-16-48.676759.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": 
"2023_10_03T14_16_48.676759", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-10-03T14-16-48.676759.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-10-03T14-16-48.676759.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_10_03T14_16_48.676759", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-10-03T14-16-48.676759.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-10-03T14-16-48.676759.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_10_03T14_16_48.676759", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-03T14-16-48.676759.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-03T14-16-48.676759.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_10_03T14_16_48.676759", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-10-03T14-16-48.676759.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-10-03T14-16-48.676759.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_10_03T14_16_48.676759", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-10-03T14-16-48.676759.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-10-03T14-16-48.676759.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_10_03T14_16_48.676759", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-10-03T14-16-48.676759.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-10-03T14-16-48.676759.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_10_03T14_16_48.676759", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-10-03T14-16-48.676759.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-10-03T14-16-48.676759.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_10_03T14_16_48.676759", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-10-03T14-16-48.676759.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-10-03T14-16-48.676759.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_10_03T14_16_48.676759", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-10-03T14-16-48.676759.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-10-03T14-16-48.676759.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_10_03T14_16_48.676759", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-10-03T14-16-48.676759.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-10-03T14-16-48.676759.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_10_03T14_16_48.676759", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-10-03T14-16-48.676759.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-public_relations|5_2023-10-03T14-16-48.676759.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_10_03T14_16_48.676759", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-10-03T14-16-48.676759.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-10-03T14-16-48.676759.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_10_03T14_16_48.676759", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-10-03T14-16-48.676759.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-10-03T14-16-48.676759.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_10_03T14_16_48.676759", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-03T14-16-48.676759.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-03T14-16-48.676759.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_10_03T14_16_48.676759", "path": ["**/details_harness|hendrycksTest-virology|5_2023-10-03T14-16-48.676759.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-10-03T14-16-48.676759.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_10_03T14_16_48.676759", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-10-03T14-16-48.676759.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-10-03T14-16-48.676759.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_10_03T14_16_48.676759", "path": ["**/details_harness|truthfulqa:mc|0_2023-10-03T14-16-48.676759.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-10-03T14-16-48.676759.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_10_24T05_37_13.853267", "path": ["**/details_harness|winogrande|5_2023-10-24T05-37-13.853267.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-10-24T05-37-13.853267.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_10_03T14_16_48.676759", "path": ["results_2023-10-03T14-16-48.676759.parquet"]}, {"split": "2023_10_24T05_37_13.853267", "path": ["results_2023-10-24T05-37-13.853267.parquet"]}, {"split": "latest", "path": ["results_2023-10-24T05-37-13.853267.parquet"]}]}]}
2023-10-24T04:37:26+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of winglian/basilisk-4b ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model winglian/basilisk-4b on the Open LLM Leaderboard. The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-10-24T05:37:13.853267 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
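The loading example referenced in the summary above does not survive in this text field. A minimal sketch of what it would look like, assuming the repo follows the leaderboard's usual `open-llm-leaderboard/details_<org>__<model>` naming seen elsewhere in this dump, and that the `harness_winogrande_5` configuration listed in the record's metadata exists:

```python
from datasets import load_dataset

# Hypothetical repo id, inferred from the details_<org>__<model> pattern
# used by the other cards in this dump.
data = load_dataset(
    "open-llm-leaderboard/details_winglian__basilisk-4b",
    "harness_winogrande_5",  # one of the 64 per-task configurations
    split="latest",          # "latest" always points at the most recent run
)
print(data[0])  # one row per evaluated example
```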
[ "# Dataset Card for Evaluation run of winglian/basilisk-4b", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model winglian/basilisk-4b on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-24T05:37:13.853267(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of winglian/basilisk-4b", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model winglian/basilisk-4b on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-24T05:37:13.853267(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 18, 31, 166, 67, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of winglian/basilisk-4b## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model winglian/basilisk-4b on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-10-24T05:37:13.853267(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
55fbabb290d1c16da28a5bd93417490a2ee7ddc7
# Dataset Card for Evaluation run of 42MARU/sitebunny-13b

## Dataset Description

- **Homepage:** 
- **Repository:** https://huggingface.co/42MARU/sitebunny-13b
- **Paper:** 
- **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
- **Point of Contact:** [email protected]

### Dataset Summary

Dataset automatically created during the evaluation run of model [42MARU/sitebunny-13b](https://huggingface.co/42MARU/sitebunny-13b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).

The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).

To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_42MARU__sitebunny-13b",
	"harness_winogrande_5",
	split="train")
```

## Latest results

These are the [latest results from run 2023-10-23T17:55:45.857449](https://huggingface.co/datasets/open-llm-leaderboard/details_42MARU__sitebunny-13b/blob/main/results_2023-10-23T17-55-45.857449.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval):

```python
{
    "all": {
        "em": 0.3419672818791946,
        "em_stderr": 0.004857979450579366,
        "f1": 0.4322766359060419,
        "f1_stderr": 0.004628734048935794,
        "acc": 0.4305885746119642,
        "acc_stderr": 0.009958510446364506
    },
    "harness|drop|3": {
        "em": 0.3419672818791946,
        "em_stderr": 0.004857979450579366,
        "f1": 0.4322766359060419,
        "f1_stderr": 0.004628734048935794
    },
    "harness|gsm8k|5": {
        "acc": 0.09401061410159212,
        "acc_stderr": 0.008038819818872469
    },
    "harness|winogrande|5": {
        "acc": 0.7671665351223362,
        "acc_stderr": 0.011878201073856544
    }
}
```

### Supported Tasks and Leaderboards

[More Information Needed]

### Languages

[More Information Needed]

## Dataset Structure

### Data Instances

[More Information Needed]

### Data Fields

[More Information Needed]

### Data Splits

[More Information Needed]

## Dataset Creation

### Curation Rationale

[More Information Needed]

### Source Data

#### Initial Data Collection and Normalization

[More Information Needed]

#### Who are the source language producers?

[More Information Needed]

### Annotations

#### Annotation process

[More Information Needed]

#### Who are the annotators?

[More Information Needed]

### Personal and Sensitive Information

[More Information Needed]

## Considerations for Using the Data

### Social Impact of Dataset

[More Information Needed]

### Discussion of Biases

[More Information Needed]

### Other Known Limitations

[More Information Needed]

## Additional Information

### Dataset Curators

[More Information Needed]

### Licensing Information

[More Information Needed]

### Citation Information

[More Information Needed]

### Contributions

[More Information Needed]
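Beyond loading one task's details as shown in the card above, the aggregated metrics quoted under "Latest results" live in the "results" configuration. A short sketch, assuming the split names declared in this record's metadata:

```python
from datasets import load_dataset

# The "results" configuration aggregates every run; per this record's
# metadata, "latest" resolves to results_2023-10-23T17-55-45.857449.parquet.
results = load_dataset(
    "open-llm-leaderboard/details_42MARU__sitebunny-13b",
    "results",
    split="latest",
)
print(results[0])  # should contain e.g. the winogrande acc of ~0.767
```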
open-llm-leaderboard/details_42MARU__sitebunny-13b
[ "region:us" ]
2023-10-03T13:18:38+00:00
{"pretty_name": "Evaluation run of 42MARU/sitebunny-13b", "dataset_summary": "Dataset automatically created during the evaluation run of model [42MARU/sitebunny-13b](https://huggingface.co/42MARU/sitebunny-13b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_42MARU__sitebunny-13b\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-10-23T17:55:45.857449](https://huggingface.co/datasets/open-llm-leaderboard/details_42MARU__sitebunny-13b/blob/main/results_2023-10-23T17-55-45.857449.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.3419672818791946,\n \"em_stderr\": 0.004857979450579366,\n \"f1\": 0.4322766359060419,\n \"f1_stderr\": 0.004628734048935794,\n \"acc\": 0.4305885746119642,\n \"acc_stderr\": 0.009958510446364506\n },\n \"harness|drop|3\": {\n \"em\": 0.3419672818791946,\n \"em_stderr\": 0.004857979450579366,\n \"f1\": 0.4322766359060419,\n \"f1_stderr\": 0.004628734048935794\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.09401061410159212,\n \"acc_stderr\": 0.008038819818872469\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7671665351223362,\n \"acc_stderr\": 0.011878201073856544\n }\n}\n```", "repo_url": "https://huggingface.co/42MARU/sitebunny-13b", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_10_03T14_18_14.630504", "path": ["**/details_harness|arc:challenge|25_2023-10-03T14-18-14.630504.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-10-03T14-18-14.630504.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_10_23T17_55_45.857449", "path": ["**/details_harness|drop|3_2023-10-23T17-55-45.857449.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-10-23T17-55-45.857449.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_10_23T17_55_45.857449", "path": ["**/details_harness|gsm8k|5_2023-10-23T17-55-45.857449.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-10-23T17-55-45.857449.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_10_03T14_18_14.630504", "path": ["**/details_harness|hellaswag|10_2023-10-03T14-18-14.630504.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-10-03T14-18-14.630504.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_10_03T14_18_14.630504", 
"path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-03T14-18-14.630504.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-10-03T14-18-14.630504.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-10-03T14-18-14.630504.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-10-03T14-18-14.630504.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-03T14-18-14.630504.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-10-03T14-18-14.630504.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-10-03T14-18-14.630504.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-10-03T14-18-14.630504.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-10-03T14-18-14.630504.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-10-03T14-18-14.630504.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-10-03T14-18-14.630504.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-10-03T14-18-14.630504.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-03T14-18-14.630504.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-10-03T14-18-14.630504.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-03T14-18-14.630504.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-03T14-18-14.630504.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-10-03T14-18-14.630504.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-10-03T14-18-14.630504.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-10-03T14-18-14.630504.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-03T14-18-14.630504.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-03T14-18-14.630504.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-03T14-18-14.630504.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-10-03T14-18-14.630504.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-03T14-18-14.630504.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-03T14-18-14.630504.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-03T14-18-14.630504.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-03T14-18-14.630504.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-10-03T14-18-14.630504.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-03T14-18-14.630504.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-03T14-18-14.630504.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-03T14-18-14.630504.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-03T14-18-14.630504.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-10-03T14-18-14.630504.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-10-03T14-18-14.630504.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-10-03T14-18-14.630504.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-10-03T14-18-14.630504.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-03T14-18-14.630504.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-10-03T14-18-14.630504.parquet", 
"**/details_harness|hendrycksTest-management|5_2023-10-03T14-18-14.630504.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-10-03T14-18-14.630504.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-10-03T14-18-14.630504.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-10-03T14-18-14.630504.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-10-03T14-18-14.630504.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-03T14-18-14.630504.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-10-03T14-18-14.630504.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-10-03T14-18-14.630504.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-10-03T14-18-14.630504.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-10-03T14-18-14.630504.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-10-03T14-18-14.630504.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-10-03T14-18-14.630504.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-10-03T14-18-14.630504.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-10-03T14-18-14.630504.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-10-03T14-18-14.630504.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-10-03T14-18-14.630504.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-03T14-18-14.630504.parquet", "**/details_harness|hendrycksTest-virology|5_2023-10-03T14-18-14.630504.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-10-03T14-18-14.630504.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-03T14-18-14.630504.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-10-03T14-18-14.630504.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-10-03T14-18-14.630504.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-10-03T14-18-14.630504.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-03T14-18-14.630504.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-10-03T14-18-14.630504.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-10-03T14-18-14.630504.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-10-03T14-18-14.630504.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-10-03T14-18-14.630504.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-10-03T14-18-14.630504.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-10-03T14-18-14.630504.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-10-03T14-18-14.630504.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-03T14-18-14.630504.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-10-03T14-18-14.630504.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-03T14-18-14.630504.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-03T14-18-14.630504.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-10-03T14-18-14.630504.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-10-03T14-18-14.630504.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-10-03T14-18-14.630504.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-03T14-18-14.630504.parquet", 
"**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-03T14-18-14.630504.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-03T14-18-14.630504.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-10-03T14-18-14.630504.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-03T14-18-14.630504.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-03T14-18-14.630504.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-03T14-18-14.630504.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-03T14-18-14.630504.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-10-03T14-18-14.630504.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-03T14-18-14.630504.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-03T14-18-14.630504.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-03T14-18-14.630504.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-03T14-18-14.630504.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-10-03T14-18-14.630504.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-10-03T14-18-14.630504.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-10-03T14-18-14.630504.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-10-03T14-18-14.630504.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-03T14-18-14.630504.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-10-03T14-18-14.630504.parquet", "**/details_harness|hendrycksTest-management|5_2023-10-03T14-18-14.630504.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-10-03T14-18-14.630504.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-10-03T14-18-14.630504.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-10-03T14-18-14.630504.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-10-03T14-18-14.630504.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-03T14-18-14.630504.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-10-03T14-18-14.630504.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-10-03T14-18-14.630504.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-10-03T14-18-14.630504.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-10-03T14-18-14.630504.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-10-03T14-18-14.630504.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-10-03T14-18-14.630504.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-10-03T14-18-14.630504.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-10-03T14-18-14.630504.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-10-03T14-18-14.630504.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-10-03T14-18-14.630504.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-03T14-18-14.630504.parquet", "**/details_harness|hendrycksTest-virology|5_2023-10-03T14-18-14.630504.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-10-03T14-18-14.630504.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_10_03T14_18_14.630504", "path": 
["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-03T14-18-14.630504.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-03T14-18-14.630504.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_10_03T14_18_14.630504", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-10-03T14-18-14.630504.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-10-03T14-18-14.630504.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_10_03T14_18_14.630504", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-10-03T14-18-14.630504.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-10-03T14-18-14.630504.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_10_03T14_18_14.630504", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-10-03T14-18-14.630504.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-10-03T14-18-14.630504.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_10_03T14_18_14.630504", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-03T14-18-14.630504.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-03T14-18-14.630504.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_10_03T14_18_14.630504", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-10-03T14-18-14.630504.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-10-03T14-18-14.630504.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_10_03T14_18_14.630504", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-10-03T14-18-14.630504.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-10-03T14-18-14.630504.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_10_03T14_18_14.630504", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-10-03T14-18-14.630504.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-10-03T14-18-14.630504.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_10_03T14_18_14.630504", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-10-03T14-18-14.630504.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-10-03T14-18-14.630504.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_10_03T14_18_14.630504", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-10-03T14-18-14.630504.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-10-03T14-18-14.630504.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_10_03T14_18_14.630504", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-10-03T14-18-14.630504.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-college_physics|5_2023-10-03T14-18-14.630504.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_10_03T14_18_14.630504", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-10-03T14-18-14.630504.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-10-03T14-18-14.630504.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_10_03T14_18_14.630504", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-03T14-18-14.630504.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-03T14-18-14.630504.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_10_03T14_18_14.630504", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-10-03T14-18-14.630504.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-10-03T14-18-14.630504.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_10_03T14_18_14.630504", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-03T14-18-14.630504.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-03T14-18-14.630504.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_10_03T14_18_14.630504", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-03T14-18-14.630504.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-03T14-18-14.630504.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_10_03T14_18_14.630504", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-10-03T14-18-14.630504.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-10-03T14-18-14.630504.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_10_03T14_18_14.630504", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-10-03T14-18-14.630504.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-10-03T14-18-14.630504.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_10_03T14_18_14.630504", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-10-03T14-18-14.630504.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-10-03T14-18-14.630504.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_10_03T14_18_14.630504", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-03T14-18-14.630504.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-03T14-18-14.630504.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_10_03T14_18_14.630504", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-03T14-18-14.630504.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-03T14-18-14.630504.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_10_03T14_18_14.630504", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-03T14-18-14.630504.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-03T14-18-14.630504.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_10_03T14_18_14.630504", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-10-03T14-18-14.630504.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-10-03T14-18-14.630504.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_10_03T14_18_14.630504", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-03T14-18-14.630504.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-03T14-18-14.630504.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_10_03T14_18_14.630504", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-03T14-18-14.630504.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-03T14-18-14.630504.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_10_03T14_18_14.630504", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-03T14-18-14.630504.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-03T14-18-14.630504.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_10_03T14_18_14.630504", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-03T14-18-14.630504.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-03T14-18-14.630504.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_10_03T14_18_14.630504", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-10-03T14-18-14.630504.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-10-03T14-18-14.630504.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_10_03T14_18_14.630504", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-03T14-18-14.630504.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-03T14-18-14.630504.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_10_03T14_18_14.630504", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-03T14-18-14.630504.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-03T14-18-14.630504.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_10_03T14_18_14.630504", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-03T14-18-14.630504.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-03T14-18-14.630504.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_10_03T14_18_14.630504", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-03T14-18-14.630504.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-03T14-18-14.630504.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_10_03T14_18_14.630504", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-10-03T14-18-14.630504.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-10-03T14-18-14.630504.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_10_03T14_18_14.630504", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-10-03T14-18-14.630504.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-10-03T14-18-14.630504.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_10_03T14_18_14.630504", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-10-03T14-18-14.630504.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-10-03T14-18-14.630504.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_10_03T14_18_14.630504", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-10-03T14-18-14.630504.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-10-03T14-18-14.630504.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_10_03T14_18_14.630504", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-03T14-18-14.630504.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-03T14-18-14.630504.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_10_03T14_18_14.630504", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-10-03T14-18-14.630504.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-10-03T14-18-14.630504.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_10_03T14_18_14.630504", "path": ["**/details_harness|hendrycksTest-management|5_2023-10-03T14-18-14.630504.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-10-03T14-18-14.630504.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_10_03T14_18_14.630504", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-10-03T14-18-14.630504.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-10-03T14-18-14.630504.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_10_03T14_18_14.630504", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-10-03T14-18-14.630504.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-10-03T14-18-14.630504.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": 
"2023_10_03T14_18_14.630504", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-10-03T14-18-14.630504.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-10-03T14-18-14.630504.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_10_03T14_18_14.630504", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-10-03T14-18-14.630504.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-10-03T14-18-14.630504.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_10_03T14_18_14.630504", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-03T14-18-14.630504.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-03T14-18-14.630504.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_10_03T14_18_14.630504", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-10-03T14-18-14.630504.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-10-03T14-18-14.630504.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_10_03T14_18_14.630504", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-10-03T14-18-14.630504.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-10-03T14-18-14.630504.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_10_03T14_18_14.630504", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-10-03T14-18-14.630504.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-10-03T14-18-14.630504.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_10_03T14_18_14.630504", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-10-03T14-18-14.630504.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-10-03T14-18-14.630504.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_10_03T14_18_14.630504", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-10-03T14-18-14.630504.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-10-03T14-18-14.630504.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_10_03T14_18_14.630504", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-10-03T14-18-14.630504.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-10-03T14-18-14.630504.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_10_03T14_18_14.630504", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-10-03T14-18-14.630504.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-10-03T14-18-14.630504.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_10_03T14_18_14.630504", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-10-03T14-18-14.630504.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-public_relations|5_2023-10-03T14-18-14.630504.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_10_03T14_18_14.630504", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-10-03T14-18-14.630504.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-10-03T14-18-14.630504.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_10_03T14_18_14.630504", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-10-03T14-18-14.630504.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-10-03T14-18-14.630504.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_10_03T14_18_14.630504", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-03T14-18-14.630504.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-03T14-18-14.630504.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_10_03T14_18_14.630504", "path": ["**/details_harness|hendrycksTest-virology|5_2023-10-03T14-18-14.630504.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-10-03T14-18-14.630504.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_10_03T14_18_14.630504", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-10-03T14-18-14.630504.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-10-03T14-18-14.630504.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_10_03T14_18_14.630504", "path": ["**/details_harness|truthfulqa:mc|0_2023-10-03T14-18-14.630504.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-10-03T14-18-14.630504.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_10_23T17_55_45.857449", "path": ["**/details_harness|winogrande|5_2023-10-23T17-55-45.857449.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-10-23T17-55-45.857449.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_10_03T14_18_14.630504", "path": ["results_2023-10-03T14-18-14.630504.parquet"]}, {"split": "2023_10_23T17_55_45.857449", "path": ["results_2023-10-23T17-55-45.857449.parquet"]}, {"split": "latest", "path": ["results_2023-10-23T17-55-45.857449.parquet"]}]}]}
2023-10-23T16:55:58+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of 42MARU/sitebunny-13b ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model 42MARU/sitebunny-13b on the Open LLM Leaderboard. The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-10-23T17:55:45.857449 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
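Here too the loading snippet is absent from this text field. One possible sketch for pulling a single historical run by its timestamped split name; the split id below is taken from this record's metadata and is an assumption beyond that:

```python
from datasets import load_dataset

# Load one specific evaluation run via its timestamped split,
# instead of the moving "latest" pointer.
run = load_dataset(
    "open-llm-leaderboard/details_42MARU__sitebunny-13b",
    "harness_gsm8k_5",
    split="2023_10_23T17_55_45.857449",
)
print(len(run))  # number of GSM8K examples scored in that run
```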
[ "# Dataset Card for Evaluation run of 42MARU/sitebunny-13b", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model 42MARU/sitebunny-13b on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-23T17:55:45.857449(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of 42MARU/sitebunny-13b", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model 42MARU/sitebunny-13b on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-23T17:55:45.857449(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 18, 31, 166, 67, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of 42MARU/sitebunny-13b## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model 42MARU/sitebunny-13b on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-10-23T17:55:45.857449(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
8530d1a9b107384d71c1173f7bc15f3f2b91e9c5
# Dataset Card for Evaluation run of winglian/llama-2-4b

## Dataset Description

- **Homepage:** 
- **Repository:** https://huggingface.co/winglian/llama-2-4b
- **Paper:** 
- **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
- **Point of Contact:** [email protected]

### Dataset Summary

Dataset automatically created during the evaluation run of model [winglian/llama-2-4b](https://huggingface.co/winglian/llama-2-4b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).

The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).

To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_winglian__llama-2-4b",
	"harness_winogrande_5",
	split="train")
```

## Latest results

These are the [latest results from run 2023-10-26T09:12:20.462280](https://huggingface.co/datasets/open-llm-leaderboard/details_winglian__llama-2-4b/blob/main/results_2023-10-26T09-12-20.462280.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval):

```python
{
    "all": {
        "em": 0.0016778523489932886,
        "em_stderr": 0.0004191330178826824,
        "f1": 0.039830117449664484,
        "f1_stderr": 0.0011176272810803495,
        "acc": 0.2895672680207256,
        "acc_stderr": 0.007874406355324997
    },
    "harness|drop|3": {
        "em": 0.0016778523489932886,
        "em_stderr": 0.0004191330178826824,
        "f1": 0.039830117449664484,
        "f1_stderr": 0.0011176272810803495
    },
    "harness|gsm8k|5": {
        "acc": 0.004548900682335102,
        "acc_stderr": 0.0018535550440036202
    },
    "harness|winogrande|5": {
        "acc": 0.574585635359116,
        "acc_stderr": 0.013895257666646373
    }
}
```

### Supported Tasks and Leaderboards

[More Information Needed]

### Languages

[More Information Needed]

## Dataset Structure

### Data Instances

[More Information Needed]

### Data Fields

[More Information Needed]

### Data Splits

[More Information Needed]

## Dataset Creation

### Curation Rationale

[More Information Needed]

### Source Data

#### Initial Data Collection and Normalization

[More Information Needed]

#### Who are the source language producers?

[More Information Needed]

### Annotations

#### Annotation process

[More Information Needed]

#### Who are the annotators?

[More Information Needed]

### Personal and Sensitive Information

[More Information Needed]

## Considerations for Using the Data

### Social Impact of Dataset

[More Information Needed]

### Discussion of Biases

[More Information Needed]

### Other Known Limitations

[More Information Needed]

## Additional Information

### Dataset Curators

[More Information Needed]

### Licensing Information

[More Information Needed]

### Citation Information

[More Information Needed]

### Contributions

[More Information Needed]
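Since each details repo exposes 64 task configurations plus "results", enumerating them programmatically can be easier than reading the metadata by hand. A sketch using the `datasets` config-listing helper, with the repo id taken from the card above:

```python
from datasets import get_dataset_config_names, load_dataset

repo = "open-llm-leaderboard/details_winglian__llama-2-4b"

# List every available configuration, e.g. "harness_arc_challenge_25",
# the per-subject "harness_hendrycksTest_*_5" configs, and "results".
configs = get_dataset_config_names(repo)
print(len(configs), configs[:5])

# Fetch the DROP details behind the "Latest results" numbers above.
drop = load_dataset(repo, "harness_drop_3", split="latest")
```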
open-llm-leaderboard/details_winglian__llama-2-4b
[ "region:us" ]
2023-10-03T13:22:56+00:00
{"pretty_name": "Evaluation run of winglian/llama-2-4b", "dataset_summary": "Dataset automatically created during the evaluation run of model [winglian/llama-2-4b](https://huggingface.co/winglian/llama-2-4b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_winglian__llama-2-4b\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-10-26T09:12:20.462280](https://huggingface.co/datasets/open-llm-leaderboard/details_winglian__llama-2-4b/blob/main/results_2023-10-26T09-12-20.462280.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.0016778523489932886,\n \"em_stderr\": 0.0004191330178826824,\n \"f1\": 0.039830117449664484,\n \"f1_stderr\": 0.0011176272810803495,\n \"acc\": 0.2895672680207256,\n \"acc_stderr\": 0.007874406355324997\n },\n \"harness|drop|3\": {\n \"em\": 0.0016778523489932886,\n \"em_stderr\": 0.0004191330178826824,\n \"f1\": 0.039830117449664484,\n \"f1_stderr\": 0.0011176272810803495\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.004548900682335102,\n \"acc_stderr\": 0.0018535550440036202\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.574585635359116,\n \"acc_stderr\": 0.013895257666646373\n }\n}\n```", "repo_url": "https://huggingface.co/winglian/llama-2-4b", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_10_03T14_22_33.156570", "path": ["**/details_harness|arc:challenge|25_2023-10-03T14-22-33.156570.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-10-03T14-22-33.156570.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_10_26T09_12_20.462280", "path": ["**/details_harness|drop|3_2023-10-26T09-12-20.462280.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-10-26T09-12-20.462280.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_10_26T09_12_20.462280", "path": ["**/details_harness|gsm8k|5_2023-10-26T09-12-20.462280.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-10-26T09-12-20.462280.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_10_03T14_22_33.156570", "path": ["**/details_harness|hellaswag|10_2023-10-03T14-22-33.156570.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-10-03T14-22-33.156570.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": 
"2023_10_03T14_22_33.156570", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-03T14-22-33.156570.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-10-03T14-22-33.156570.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-10-03T14-22-33.156570.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-10-03T14-22-33.156570.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-03T14-22-33.156570.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-10-03T14-22-33.156570.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-10-03T14-22-33.156570.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-10-03T14-22-33.156570.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-10-03T14-22-33.156570.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-10-03T14-22-33.156570.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-10-03T14-22-33.156570.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-10-03T14-22-33.156570.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-03T14-22-33.156570.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-10-03T14-22-33.156570.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-03T14-22-33.156570.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-03T14-22-33.156570.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-10-03T14-22-33.156570.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-10-03T14-22-33.156570.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-10-03T14-22-33.156570.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-03T14-22-33.156570.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-03T14-22-33.156570.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-03T14-22-33.156570.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-10-03T14-22-33.156570.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-03T14-22-33.156570.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-03T14-22-33.156570.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-03T14-22-33.156570.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-03T14-22-33.156570.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-10-03T14-22-33.156570.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-03T14-22-33.156570.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-03T14-22-33.156570.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-03T14-22-33.156570.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-03T14-22-33.156570.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-10-03T14-22-33.156570.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-10-03T14-22-33.156570.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-10-03T14-22-33.156570.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-10-03T14-22-33.156570.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-03T14-22-33.156570.parquet", 
"**/details_harness|hendrycksTest-machine_learning|5_2023-10-03T14-22-33.156570.parquet", "**/details_harness|hendrycksTest-management|5_2023-10-03T14-22-33.156570.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-10-03T14-22-33.156570.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-10-03T14-22-33.156570.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-10-03T14-22-33.156570.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-10-03T14-22-33.156570.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-03T14-22-33.156570.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-10-03T14-22-33.156570.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-10-03T14-22-33.156570.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-10-03T14-22-33.156570.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-10-03T14-22-33.156570.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-10-03T14-22-33.156570.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-10-03T14-22-33.156570.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-10-03T14-22-33.156570.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-10-03T14-22-33.156570.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-10-03T14-22-33.156570.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-10-03T14-22-33.156570.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-03T14-22-33.156570.parquet", "**/details_harness|hendrycksTest-virology|5_2023-10-03T14-22-33.156570.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-10-03T14-22-33.156570.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-03T14-22-33.156570.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-10-03T14-22-33.156570.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-10-03T14-22-33.156570.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-10-03T14-22-33.156570.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-03T14-22-33.156570.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-10-03T14-22-33.156570.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-10-03T14-22-33.156570.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-10-03T14-22-33.156570.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-10-03T14-22-33.156570.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-10-03T14-22-33.156570.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-10-03T14-22-33.156570.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-10-03T14-22-33.156570.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-03T14-22-33.156570.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-10-03T14-22-33.156570.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-03T14-22-33.156570.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-03T14-22-33.156570.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-10-03T14-22-33.156570.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-10-03T14-22-33.156570.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-10-03T14-22-33.156570.parquet", 
"**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-03T14-22-33.156570.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-03T14-22-33.156570.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-03T14-22-33.156570.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-10-03T14-22-33.156570.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-03T14-22-33.156570.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-03T14-22-33.156570.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-03T14-22-33.156570.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-03T14-22-33.156570.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-10-03T14-22-33.156570.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-03T14-22-33.156570.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-03T14-22-33.156570.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-03T14-22-33.156570.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-03T14-22-33.156570.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-10-03T14-22-33.156570.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-10-03T14-22-33.156570.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-10-03T14-22-33.156570.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-10-03T14-22-33.156570.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-03T14-22-33.156570.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-10-03T14-22-33.156570.parquet", "**/details_harness|hendrycksTest-management|5_2023-10-03T14-22-33.156570.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-10-03T14-22-33.156570.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-10-03T14-22-33.156570.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-10-03T14-22-33.156570.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-10-03T14-22-33.156570.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-03T14-22-33.156570.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-10-03T14-22-33.156570.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-10-03T14-22-33.156570.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-10-03T14-22-33.156570.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-10-03T14-22-33.156570.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-10-03T14-22-33.156570.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-10-03T14-22-33.156570.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-10-03T14-22-33.156570.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-10-03T14-22-33.156570.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-10-03T14-22-33.156570.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-10-03T14-22-33.156570.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-03T14-22-33.156570.parquet", "**/details_harness|hendrycksTest-virology|5_2023-10-03T14-22-33.156570.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-10-03T14-22-33.156570.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", 
"data_files": [{"split": "2023_10_03T14_22_33.156570", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-03T14-22-33.156570.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-03T14-22-33.156570.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_10_03T14_22_33.156570", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-10-03T14-22-33.156570.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-10-03T14-22-33.156570.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_10_03T14_22_33.156570", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-10-03T14-22-33.156570.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-10-03T14-22-33.156570.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_10_03T14_22_33.156570", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-10-03T14-22-33.156570.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-10-03T14-22-33.156570.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_10_03T14_22_33.156570", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-03T14-22-33.156570.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-03T14-22-33.156570.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_10_03T14_22_33.156570", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-10-03T14-22-33.156570.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-10-03T14-22-33.156570.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_10_03T14_22_33.156570", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-10-03T14-22-33.156570.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-10-03T14-22-33.156570.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_10_03T14_22_33.156570", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-10-03T14-22-33.156570.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-10-03T14-22-33.156570.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_10_03T14_22_33.156570", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-10-03T14-22-33.156570.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-10-03T14-22-33.156570.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_10_03T14_22_33.156570", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-10-03T14-22-33.156570.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-10-03T14-22-33.156570.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_10_03T14_22_33.156570", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-10-03T14-22-33.156570.parquet"]}, {"split": "latest", 
"path": ["**/details_harness|hendrycksTest-college_physics|5_2023-10-03T14-22-33.156570.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_10_03T14_22_33.156570", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-10-03T14-22-33.156570.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-10-03T14-22-33.156570.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_10_03T14_22_33.156570", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-03T14-22-33.156570.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-03T14-22-33.156570.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_10_03T14_22_33.156570", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-10-03T14-22-33.156570.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-10-03T14-22-33.156570.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_10_03T14_22_33.156570", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-03T14-22-33.156570.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-03T14-22-33.156570.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_10_03T14_22_33.156570", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-03T14-22-33.156570.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-03T14-22-33.156570.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_10_03T14_22_33.156570", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-10-03T14-22-33.156570.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-10-03T14-22-33.156570.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_10_03T14_22_33.156570", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-10-03T14-22-33.156570.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-10-03T14-22-33.156570.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_10_03T14_22_33.156570", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-10-03T14-22-33.156570.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-10-03T14-22-33.156570.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_10_03T14_22_33.156570", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-03T14-22-33.156570.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-03T14-22-33.156570.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_10_03T14_22_33.156570", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-03T14-22-33.156570.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-03T14-22-33.156570.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_10_03T14_22_33.156570", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-03T14-22-33.156570.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-03T14-22-33.156570.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_10_03T14_22_33.156570", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-10-03T14-22-33.156570.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-10-03T14-22-33.156570.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_10_03T14_22_33.156570", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-03T14-22-33.156570.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-03T14-22-33.156570.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_10_03T14_22_33.156570", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-03T14-22-33.156570.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-03T14-22-33.156570.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_10_03T14_22_33.156570", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-03T14-22-33.156570.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-03T14-22-33.156570.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_10_03T14_22_33.156570", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-03T14-22-33.156570.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-03T14-22-33.156570.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_10_03T14_22_33.156570", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-10-03T14-22-33.156570.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-10-03T14-22-33.156570.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_10_03T14_22_33.156570", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-03T14-22-33.156570.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-03T14-22-33.156570.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_10_03T14_22_33.156570", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-03T14-22-33.156570.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-03T14-22-33.156570.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_10_03T14_22_33.156570", "path": 
["**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-03T14-22-33.156570.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-03T14-22-33.156570.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_10_03T14_22_33.156570", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-03T14-22-33.156570.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-03T14-22-33.156570.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_10_03T14_22_33.156570", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-10-03T14-22-33.156570.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-10-03T14-22-33.156570.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_10_03T14_22_33.156570", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-10-03T14-22-33.156570.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-10-03T14-22-33.156570.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_10_03T14_22_33.156570", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-10-03T14-22-33.156570.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-10-03T14-22-33.156570.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_10_03T14_22_33.156570", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-10-03T14-22-33.156570.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-10-03T14-22-33.156570.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_10_03T14_22_33.156570", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-03T14-22-33.156570.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-03T14-22-33.156570.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_10_03T14_22_33.156570", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-10-03T14-22-33.156570.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-10-03T14-22-33.156570.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_10_03T14_22_33.156570", "path": ["**/details_harness|hendrycksTest-management|5_2023-10-03T14-22-33.156570.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-10-03T14-22-33.156570.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_10_03T14_22_33.156570", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-10-03T14-22-33.156570.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-10-03T14-22-33.156570.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_10_03T14_22_33.156570", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-10-03T14-22-33.156570.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-medical_genetics|5_2023-10-03T14-22-33.156570.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_10_03T14_22_33.156570", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-10-03T14-22-33.156570.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-10-03T14-22-33.156570.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_10_03T14_22_33.156570", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-10-03T14-22-33.156570.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-10-03T14-22-33.156570.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_10_03T14_22_33.156570", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-03T14-22-33.156570.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-03T14-22-33.156570.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_10_03T14_22_33.156570", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-10-03T14-22-33.156570.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-10-03T14-22-33.156570.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_10_03T14_22_33.156570", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-10-03T14-22-33.156570.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-10-03T14-22-33.156570.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_10_03T14_22_33.156570", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-10-03T14-22-33.156570.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-10-03T14-22-33.156570.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_10_03T14_22_33.156570", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-10-03T14-22-33.156570.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-10-03T14-22-33.156570.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_10_03T14_22_33.156570", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-10-03T14-22-33.156570.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-10-03T14-22-33.156570.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_10_03T14_22_33.156570", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-10-03T14-22-33.156570.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-10-03T14-22-33.156570.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_10_03T14_22_33.156570", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-10-03T14-22-33.156570.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-10-03T14-22-33.156570.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": 
"2023_10_03T14_22_33.156570", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-10-03T14-22-33.156570.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-10-03T14-22-33.156570.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_10_03T14_22_33.156570", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-10-03T14-22-33.156570.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-10-03T14-22-33.156570.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_10_03T14_22_33.156570", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-10-03T14-22-33.156570.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-10-03T14-22-33.156570.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_10_03T14_22_33.156570", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-03T14-22-33.156570.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-03T14-22-33.156570.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_10_03T14_22_33.156570", "path": ["**/details_harness|hendrycksTest-virology|5_2023-10-03T14-22-33.156570.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-10-03T14-22-33.156570.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_10_03T14_22_33.156570", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-10-03T14-22-33.156570.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-10-03T14-22-33.156570.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_10_03T14_22_33.156570", "path": ["**/details_harness|truthfulqa:mc|0_2023-10-03T14-22-33.156570.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-10-03T14-22-33.156570.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_10_26T09_12_20.462280", "path": ["**/details_harness|winogrande|5_2023-10-26T09-12-20.462280.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-10-26T09-12-20.462280.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_10_03T14_22_33.156570", "path": ["results_2023-10-03T14-22-33.156570.parquet"]}, {"split": "2023_10_26T09_12_20.462280", "path": ["results_2023-10-26T09-12-20.462280.parquet"]}, {"split": "latest", "path": ["results_2023-10-26T09-12-20.462280.parquet"]}]}]}
2023-10-26T08:12:35+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of winglian/llama-2-4b

## Dataset Description

- Homepage: 
- Repository: URL
- Paper: 
- Leaderboard: URL
- Point of Contact: clementine@URL

### Dataset Summary

Dataset automatically created during the evaluation run of model winglian/llama-2-4b on the Open LLM Leaderboard.

The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).

To load the details from a run, you can for instance do the following:

## Latest results

These are the latest results from run 2023-10-26T09:12:20.462280 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval):

### Supported Tasks and Leaderboards

### Languages

## Dataset Structure

### Data Instances

### Data Fields

### Data Splits

## Dataset Creation

### Curation Rationale

### Source Data

#### Initial Data Collection and Normalization

#### Who are the source language producers?

### Annotations

#### Annotation process

#### Who are the annotators?

### Personal and Sensitive Information

## Considerations for Using the Data

### Social Impact of Dataset

### Discussion of Biases

### Other Known Limitations

## Additional Information

### Dataset Curators

### Licensing Information

### Contributions
[ "# Dataset Card for Evaluation run of winglian/llama-2-4b", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model winglian/llama-2-4b on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-26T09:12:20.462280(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of winglian/llama-2-4b", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model winglian/llama-2-4b on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-26T09:12:20.462280(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 18, 31, 166, 66, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of winglian/llama-2-4b## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model winglian/llama-2-4b on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-10-26T09:12:20.462280(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
60a9a5ae6bd04b8c23fb93d7f40d2afa750facf1
# Dataset Card for Evaluation run of winglian/Llama-2-3b-hf

## Dataset Description

- **Homepage:** 
- **Repository:** https://huggingface.co/winglian/Llama-2-3b-hf
- **Paper:** 
- **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
- **Point of Contact:** [email protected]

### Dataset Summary

Dataset automatically created during the evaluation run of model [winglian/Llama-2-3b-hf](https://huggingface.co/winglian/Llama-2-3b-hf) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).

The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).

To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_winglian__Llama-2-3b-hf",
	"harness_winogrande_5",
	split="train")
```

## Latest results

These are the [latest results from run 2023-10-24T02:31:53.066054](https://huggingface.co/datasets/open-llm-leaderboard/details_winglian__Llama-2-3b-hf/blob/main/results_2023-10-24T02-31-53.066054.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval):

```python
{
    "all": {
        "em": 0.01960989932885906,
        "em_stderr": 0.0014199622282460517,
        "f1": 0.026280411073825484,
        "f1_stderr": 0.0015385339771850702,
        "acc": 0.24822415153906865,
        "acc_stderr": 0.007026065573457934
    },
    "harness|drop|3": {
        "em": 0.01960989932885906,
        "em_stderr": 0.0014199622282460517,
        "f1": 0.026280411073825484,
        "f1_stderr": 0.0015385339771850702
    },
    "harness|gsm8k|5": {
        "acc": 0.0,
        "acc_stderr": 0.0
    },
    "harness|winogrande|5": {
        "acc": 0.4964483030781373,
        "acc_stderr": 0.014052131146915867
    }
}
```

### Supported Tasks and Leaderboards

[More Information Needed]

### Languages

[More Information Needed]

## Dataset Structure

### Data Instances

[More Information Needed]

### Data Fields

[More Information Needed]

### Data Splits

[More Information Needed]

## Dataset Creation

### Curation Rationale

[More Information Needed]

### Source Data

#### Initial Data Collection and Normalization

[More Information Needed]

#### Who are the source language producers?

[More Information Needed]

### Annotations

#### Annotation process

[More Information Needed]

#### Who are the annotators?

[More Information Needed]

### Personal and Sensitive Information

[More Information Needed]

## Considerations for Using the Data

### Social Impact of Dataset

[More Information Needed]

### Discussion of Biases

[More Information Needed]

### Other Known Limitations

[More Information Needed]

## Additional Information

### Dataset Curators

[More Information Needed]

### Licensing Information

[More Information Needed]

### Citation Information

[More Information Needed]

### Contributions

[More Information Needed]
open-llm-leaderboard/details_winglian__Llama-2-3b-hf
[ "region:us" ]
2023-10-03T13:29:55+00:00
{"pretty_name": "Evaluation run of winglian/Llama-2-3b-hf", "dataset_summary": "Dataset automatically created during the evaluation run of model [winglian/Llama-2-3b-hf](https://huggingface.co/winglian/Llama-2-3b-hf) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_winglian__Llama-2-3b-hf\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-10-24T02:31:53.066054](https://huggingface.co/datasets/open-llm-leaderboard/details_winglian__Llama-2-3b-hf/blob/main/results_2023-10-24T02-31-53.066054.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.01960989932885906,\n \"em_stderr\": 0.0014199622282460517,\n \"f1\": 0.026280411073825484,\n \"f1_stderr\": 0.0015385339771850702,\n \"acc\": 0.24822415153906865,\n \"acc_stderr\": 0.007026065573457934\n },\n \"harness|drop|3\": {\n \"em\": 0.01960989932885906,\n \"em_stderr\": 0.0014199622282460517,\n \"f1\": 0.026280411073825484,\n \"f1_stderr\": 0.0015385339771850702\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.0,\n \"acc_stderr\": 0.0\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.4964483030781373,\n \"acc_stderr\": 0.014052131146915867\n }\n}\n```", "repo_url": "https://huggingface.co/winglian/Llama-2-3b-hf", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_10_03T14_29_31.026296", "path": ["**/details_harness|arc:challenge|25_2023-10-03T14-29-31.026296.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-10-03T14-29-31.026296.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_10_24T02_31_53.066054", "path": ["**/details_harness|drop|3_2023-10-24T02-31-53.066054.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-10-24T02-31-53.066054.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_10_24T02_31_53.066054", "path": ["**/details_harness|gsm8k|5_2023-10-24T02-31-53.066054.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-10-24T02-31-53.066054.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_10_03T14_29_31.026296", "path": ["**/details_harness|hellaswag|10_2023-10-03T14-29-31.026296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-10-03T14-29-31.026296.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_10_03T14_29_31.026296", "path": 
["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-03T14-29-31.026296.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-10-03T14-29-31.026296.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-10-03T14-29-31.026296.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-10-03T14-29-31.026296.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-03T14-29-31.026296.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-10-03T14-29-31.026296.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-10-03T14-29-31.026296.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-10-03T14-29-31.026296.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-10-03T14-29-31.026296.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-10-03T14-29-31.026296.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-10-03T14-29-31.026296.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-10-03T14-29-31.026296.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-03T14-29-31.026296.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-10-03T14-29-31.026296.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-03T14-29-31.026296.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-03T14-29-31.026296.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-10-03T14-29-31.026296.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-10-03T14-29-31.026296.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-10-03T14-29-31.026296.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-03T14-29-31.026296.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-03T14-29-31.026296.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-03T14-29-31.026296.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-10-03T14-29-31.026296.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-03T14-29-31.026296.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-03T14-29-31.026296.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-03T14-29-31.026296.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-03T14-29-31.026296.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-10-03T14-29-31.026296.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-03T14-29-31.026296.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-03T14-29-31.026296.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-03T14-29-31.026296.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-03T14-29-31.026296.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-10-03T14-29-31.026296.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-10-03T14-29-31.026296.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-10-03T14-29-31.026296.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-10-03T14-29-31.026296.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-03T14-29-31.026296.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-10-03T14-29-31.026296.parquet", 
"**/details_harness|hendrycksTest-management|5_2023-10-03T14-29-31.026296.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-10-03T14-29-31.026296.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-10-03T14-29-31.026296.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-10-03T14-29-31.026296.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-10-03T14-29-31.026296.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-03T14-29-31.026296.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-10-03T14-29-31.026296.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-10-03T14-29-31.026296.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-10-03T14-29-31.026296.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-10-03T14-29-31.026296.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-10-03T14-29-31.026296.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-10-03T14-29-31.026296.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-10-03T14-29-31.026296.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-10-03T14-29-31.026296.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-10-03T14-29-31.026296.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-10-03T14-29-31.026296.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-03T14-29-31.026296.parquet", "**/details_harness|hendrycksTest-virology|5_2023-10-03T14-29-31.026296.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-10-03T14-29-31.026296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-03T14-29-31.026296.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-10-03T14-29-31.026296.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-10-03T14-29-31.026296.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-10-03T14-29-31.026296.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-03T14-29-31.026296.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-10-03T14-29-31.026296.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-10-03T14-29-31.026296.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-10-03T14-29-31.026296.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-10-03T14-29-31.026296.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-10-03T14-29-31.026296.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-10-03T14-29-31.026296.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-10-03T14-29-31.026296.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-03T14-29-31.026296.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-10-03T14-29-31.026296.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-03T14-29-31.026296.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-03T14-29-31.026296.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-10-03T14-29-31.026296.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-10-03T14-29-31.026296.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-10-03T14-29-31.026296.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-03T14-29-31.026296.parquet", 
"**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-03T14-29-31.026296.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-03T14-29-31.026296.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-10-03T14-29-31.026296.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-03T14-29-31.026296.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-03T14-29-31.026296.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-03T14-29-31.026296.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-03T14-29-31.026296.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-10-03T14-29-31.026296.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-03T14-29-31.026296.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-03T14-29-31.026296.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-03T14-29-31.026296.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-03T14-29-31.026296.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-10-03T14-29-31.026296.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-10-03T14-29-31.026296.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-10-03T14-29-31.026296.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-10-03T14-29-31.026296.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-03T14-29-31.026296.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-10-03T14-29-31.026296.parquet", "**/details_harness|hendrycksTest-management|5_2023-10-03T14-29-31.026296.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-10-03T14-29-31.026296.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-10-03T14-29-31.026296.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-10-03T14-29-31.026296.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-10-03T14-29-31.026296.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-03T14-29-31.026296.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-10-03T14-29-31.026296.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-10-03T14-29-31.026296.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-10-03T14-29-31.026296.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-10-03T14-29-31.026296.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-10-03T14-29-31.026296.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-10-03T14-29-31.026296.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-10-03T14-29-31.026296.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-10-03T14-29-31.026296.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-10-03T14-29-31.026296.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-10-03T14-29-31.026296.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-03T14-29-31.026296.parquet", "**/details_harness|hendrycksTest-virology|5_2023-10-03T14-29-31.026296.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-10-03T14-29-31.026296.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_10_03T14_29_31.026296", "path": 
["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-03T14-29-31.026296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-03T14-29-31.026296.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_10_03T14_29_31.026296", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-10-03T14-29-31.026296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-10-03T14-29-31.026296.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_10_03T14_29_31.026296", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-10-03T14-29-31.026296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-10-03T14-29-31.026296.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_10_03T14_29_31.026296", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-10-03T14-29-31.026296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-10-03T14-29-31.026296.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_10_03T14_29_31.026296", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-03T14-29-31.026296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-03T14-29-31.026296.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_10_03T14_29_31.026296", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-10-03T14-29-31.026296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-10-03T14-29-31.026296.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_10_03T14_29_31.026296", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-10-03T14-29-31.026296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-10-03T14-29-31.026296.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_10_03T14_29_31.026296", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-10-03T14-29-31.026296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-10-03T14-29-31.026296.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_10_03T14_29_31.026296", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-10-03T14-29-31.026296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-10-03T14-29-31.026296.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_10_03T14_29_31.026296", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-10-03T14-29-31.026296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-10-03T14-29-31.026296.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_10_03T14_29_31.026296", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-10-03T14-29-31.026296.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-college_physics|5_2023-10-03T14-29-31.026296.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_10_03T14_29_31.026296", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-10-03T14-29-31.026296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-10-03T14-29-31.026296.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_10_03T14_29_31.026296", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-03T14-29-31.026296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-03T14-29-31.026296.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_10_03T14_29_31.026296", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-10-03T14-29-31.026296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-10-03T14-29-31.026296.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_10_03T14_29_31.026296", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-03T14-29-31.026296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-03T14-29-31.026296.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_10_03T14_29_31.026296", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-03T14-29-31.026296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-03T14-29-31.026296.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_10_03T14_29_31.026296", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-10-03T14-29-31.026296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-10-03T14-29-31.026296.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_10_03T14_29_31.026296", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-10-03T14-29-31.026296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-10-03T14-29-31.026296.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_10_03T14_29_31.026296", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-10-03T14-29-31.026296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-10-03T14-29-31.026296.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_10_03T14_29_31.026296", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-03T14-29-31.026296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-03T14-29-31.026296.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_10_03T14_29_31.026296", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-03T14-29-31.026296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-03T14-29-31.026296.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_10_03T14_29_31.026296", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-03T14-29-31.026296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-03T14-29-31.026296.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_10_03T14_29_31.026296", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-10-03T14-29-31.026296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-10-03T14-29-31.026296.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_10_03T14_29_31.026296", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-03T14-29-31.026296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-03T14-29-31.026296.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_10_03T14_29_31.026296", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-03T14-29-31.026296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-03T14-29-31.026296.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_10_03T14_29_31.026296", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-03T14-29-31.026296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-03T14-29-31.026296.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_10_03T14_29_31.026296", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-03T14-29-31.026296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-03T14-29-31.026296.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_10_03T14_29_31.026296", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-10-03T14-29-31.026296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-10-03T14-29-31.026296.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_10_03T14_29_31.026296", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-03T14-29-31.026296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-03T14-29-31.026296.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_10_03T14_29_31.026296", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-03T14-29-31.026296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-03T14-29-31.026296.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_10_03T14_29_31.026296", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-03T14-29-31.026296.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-03T14-29-31.026296.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_10_03T14_29_31.026296", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-03T14-29-31.026296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-03T14-29-31.026296.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_10_03T14_29_31.026296", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-10-03T14-29-31.026296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-10-03T14-29-31.026296.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_10_03T14_29_31.026296", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-10-03T14-29-31.026296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-10-03T14-29-31.026296.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_10_03T14_29_31.026296", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-10-03T14-29-31.026296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-10-03T14-29-31.026296.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_10_03T14_29_31.026296", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-10-03T14-29-31.026296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-10-03T14-29-31.026296.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_10_03T14_29_31.026296", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-03T14-29-31.026296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-03T14-29-31.026296.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_10_03T14_29_31.026296", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-10-03T14-29-31.026296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-10-03T14-29-31.026296.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_10_03T14_29_31.026296", "path": ["**/details_harness|hendrycksTest-management|5_2023-10-03T14-29-31.026296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-10-03T14-29-31.026296.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_10_03T14_29_31.026296", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-10-03T14-29-31.026296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-10-03T14-29-31.026296.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_10_03T14_29_31.026296", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-10-03T14-29-31.026296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-10-03T14-29-31.026296.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": 
"2023_10_03T14_29_31.026296", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-10-03T14-29-31.026296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-10-03T14-29-31.026296.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_10_03T14_29_31.026296", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-10-03T14-29-31.026296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-10-03T14-29-31.026296.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_10_03T14_29_31.026296", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-03T14-29-31.026296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-03T14-29-31.026296.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_10_03T14_29_31.026296", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-10-03T14-29-31.026296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-10-03T14-29-31.026296.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_10_03T14_29_31.026296", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-10-03T14-29-31.026296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-10-03T14-29-31.026296.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_10_03T14_29_31.026296", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-10-03T14-29-31.026296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-10-03T14-29-31.026296.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_10_03T14_29_31.026296", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-10-03T14-29-31.026296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-10-03T14-29-31.026296.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_10_03T14_29_31.026296", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-10-03T14-29-31.026296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-10-03T14-29-31.026296.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_10_03T14_29_31.026296", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-10-03T14-29-31.026296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-10-03T14-29-31.026296.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_10_03T14_29_31.026296", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-10-03T14-29-31.026296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-10-03T14-29-31.026296.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_10_03T14_29_31.026296", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-10-03T14-29-31.026296.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-public_relations|5_2023-10-03T14-29-31.026296.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_10_03T14_29_31.026296", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-10-03T14-29-31.026296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-10-03T14-29-31.026296.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_10_03T14_29_31.026296", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-10-03T14-29-31.026296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-10-03T14-29-31.026296.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_10_03T14_29_31.026296", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-03T14-29-31.026296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-03T14-29-31.026296.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_10_03T14_29_31.026296", "path": ["**/details_harness|hendrycksTest-virology|5_2023-10-03T14-29-31.026296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-10-03T14-29-31.026296.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_10_03T14_29_31.026296", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-10-03T14-29-31.026296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-10-03T14-29-31.026296.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_10_03T14_29_31.026296", "path": ["**/details_harness|truthfulqa:mc|0_2023-10-03T14-29-31.026296.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-10-03T14-29-31.026296.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_10_24T02_31_53.066054", "path": ["**/details_harness|winogrande|5_2023-10-24T02-31-53.066054.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-10-24T02-31-53.066054.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_10_03T14_29_31.026296", "path": ["results_2023-10-03T14-29-31.026296.parquet"]}, {"split": "2023_10_24T02_31_53.066054", "path": ["results_2023-10-24T02-31-53.066054.parquet"]}, {"split": "latest", "path": ["results_2023-10-24T02-31-53.066054.parquet"]}]}]}
2023-10-24T01:32:06+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of winglian/Llama-2-3b-hf ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model winglian/Llama-2-3b-hf on the Open LLM Leaderboard. The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-10-24T02:31:53.066054 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks; you can find each in the results and the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
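The load snippet referenced by "do the following:" was stripped from this processed text field. A minimal sketch of what it would look like, assuming the leaderboard's usual `details_<org>__<model>` repository naming (the exact repo id below is inferred from the model name, not quoted from this record; `harness_winogrande_5` is one of the configs listed in this record's metadata):

```python
from datasets import load_dataset

# Repo id follows the Open LLM Leaderboard "details_<org>__<model>" pattern (assumed here).
# "harness_winogrande_5" matches a config declared in this record's metadata.
data = load_dataset(
    "open-llm-leaderboard/details_winglian__Llama-2-3b-hf",
    "harness_winogrande_5",
    split="train",
)
print(data)
```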
[ "# Dataset Card for Evaluation run of winglian/Llama-2-3b-hf", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model winglian/Llama-2-3b-hf on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-24T02:31:53.066054(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of winglian/Llama-2-3b-hf", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model winglian/Llama-2-3b-hf on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-24T02:31:53.066054(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 21, 31, 169, 67, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of winglian/Llama-2-3b-hf## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model winglian/Llama-2-3b-hf on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-10-24T02:31:53.066054(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
77bcc5343c8d796f1161f1fd82e81debd65e7951
# Dataset Card for "budget-seq2seq-xml" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
napatswift/budget-seq2seq
[ "region:us" ]
2023-10-03T13:29:56+00:00
{"dataset_info": {"features": [{"name": "line_item", "sequence": "string"}, {"name": "target", "dtype": "string"}, {"name": "format", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 134450572.0, "num_examples": 21510}], "download_size": 23772061, "dataset_size": 134450572.0}}
2023-10-05T05:29:46+00:00
[]
[]
TAGS #region-us
# Dataset Card for "budget-seq2seq-xml" More Information needed
[ "# Dataset Card for \"budget-seq2seq-xml\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"budget-seq2seq-xml\"\n\nMore Information needed" ]
[ 6, 19 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"budget-seq2seq-xml\"\n\nMore Information needed" ]
08e6fd5b44c7cec022c0dc369c8c6de66b32e5e8
# Dataset Card for Evaluation run of dfurman/llama-2-7b-instruct-peft ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/dfurman/llama-2-7b-instruct-peft - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [dfurman/llama-2-7b-instruct-peft](https://huggingface.co/dfurman/llama-2-7b-instruct-peft) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_dfurman__llama-2-7b-instruct-peft", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-10-24T03:15:50.340712](https://huggingface.co/datasets/open-llm-leaderboard/details_dfurman__llama-2-7b-instruct-peft/blob/main/results_2023-10-24T03-15-50.340712.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks; you can find each in the results and the "latest" split for each eval): ```python { "all": { "em": 0.0010486577181208054, "em_stderr": 0.0003314581465219154, "f1": 0.05818687080536916, "f1_stderr": 0.0013326120366464343, "acc": 0.4020858403049834, "acc_stderr": 0.009398700998364592 }, "harness|drop|3": { "em": 0.0010486577181208054, "em_stderr": 0.0003314581465219154, "f1": 0.05818687080536916, "f1_stderr": 0.0013326120366464343 }, "harness|gsm8k|5": { "acc": 0.05989385898407885, "acc_stderr": 0.006536148151288708 }, "harness|winogrande|5": { "acc": 0.744277821625888, "acc_stderr": 0.012261253845440474 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
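Beyond the per-task snippet shown in the card, the aggregated "results" configuration can be loaded the same way. A minimal sketch, assuming the identical `load_dataset` pattern (the "results" config and its "latest" split are confirmed by this record's metadata below):

```python
from datasets import load_dataset

# "results" stores the aggregated metrics of every run; "latest" points to the newest one.
results = load_dataset(
    "open-llm-leaderboard/details_dfurman__llama-2-7b-instruct-peft",
    "results",
    split="latest",
)
print(results[0])  # aggregated em/f1/acc figures, as in the JSON excerpt above
```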
open-llm-leaderboard/details_dfurman__llama-2-7b-instruct-peft
[ "region:us" ]
2023-10-03T13:30:01+00:00
{"pretty_name": "Evaluation run of dfurman/llama-2-7b-instruct-peft", "dataset_summary": "Dataset automatically created during the evaluation run of model [dfurman/llama-2-7b-instruct-peft](https://huggingface.co/dfurman/llama-2-7b-instruct-peft) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_dfurman__llama-2-7b-instruct-peft\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-10-24T03:15:50.340712](https://huggingface.co/datasets/open-llm-leaderboard/details_dfurman__llama-2-7b-instruct-peft/blob/main/results_2023-10-24T03-15-50.340712.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.0010486577181208054,\n \"em_stderr\": 0.0003314581465219154,\n \"f1\": 0.05818687080536916,\n \"f1_stderr\": 0.0013326120366464343,\n \"acc\": 0.4020858403049834,\n \"acc_stderr\": 0.009398700998364592\n },\n \"harness|drop|3\": {\n \"em\": 0.0010486577181208054,\n \"em_stderr\": 0.0003314581465219154,\n \"f1\": 0.05818687080536916,\n \"f1_stderr\": 0.0013326120366464343\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.05989385898407885,\n \"acc_stderr\": 0.006536148151288708\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.744277821625888,\n \"acc_stderr\": 0.012261253845440474\n }\n}\n```", "repo_url": "https://huggingface.co/dfurman/llama-2-7b-instruct-peft", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_10_03T14_29_36.510142", "path": ["**/details_harness|arc:challenge|25_2023-10-03T14-29-36.510142.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-10-03T14-29-36.510142.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_10_24T03_15_50.340712", "path": ["**/details_harness|drop|3_2023-10-24T03-15-50.340712.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-10-24T03-15-50.340712.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_10_24T03_15_50.340712", "path": ["**/details_harness|gsm8k|5_2023-10-24T03-15-50.340712.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-10-24T03-15-50.340712.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_10_03T14_29_36.510142", "path": ["**/details_harness|hellaswag|10_2023-10-03T14-29-36.510142.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-10-03T14-29-36.510142.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_5", "data_files": [{"split": "2023_10_03T14_29_36.510142", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-03T14-29-36.510142.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-10-03T14-29-36.510142.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-10-03T14-29-36.510142.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-10-03T14-29-36.510142.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-03T14-29-36.510142.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-10-03T14-29-36.510142.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-10-03T14-29-36.510142.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-10-03T14-29-36.510142.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-10-03T14-29-36.510142.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-10-03T14-29-36.510142.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-10-03T14-29-36.510142.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-10-03T14-29-36.510142.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-03T14-29-36.510142.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-10-03T14-29-36.510142.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-03T14-29-36.510142.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-03T14-29-36.510142.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-10-03T14-29-36.510142.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-10-03T14-29-36.510142.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-10-03T14-29-36.510142.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-03T14-29-36.510142.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-03T14-29-36.510142.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-03T14-29-36.510142.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-10-03T14-29-36.510142.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-03T14-29-36.510142.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-03T14-29-36.510142.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-03T14-29-36.510142.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-03T14-29-36.510142.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-10-03T14-29-36.510142.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-03T14-29-36.510142.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-03T14-29-36.510142.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-03T14-29-36.510142.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-03T14-29-36.510142.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-10-03T14-29-36.510142.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-10-03T14-29-36.510142.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-10-03T14-29-36.510142.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-10-03T14-29-36.510142.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-03T14-29-36.510142.parquet", 
"**/details_harness|hendrycksTest-machine_learning|5_2023-10-03T14-29-36.510142.parquet", "**/details_harness|hendrycksTest-management|5_2023-10-03T14-29-36.510142.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-10-03T14-29-36.510142.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-10-03T14-29-36.510142.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-10-03T14-29-36.510142.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-10-03T14-29-36.510142.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-03T14-29-36.510142.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-10-03T14-29-36.510142.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-10-03T14-29-36.510142.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-10-03T14-29-36.510142.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-10-03T14-29-36.510142.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-10-03T14-29-36.510142.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-10-03T14-29-36.510142.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-10-03T14-29-36.510142.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-10-03T14-29-36.510142.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-10-03T14-29-36.510142.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-10-03T14-29-36.510142.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-03T14-29-36.510142.parquet", "**/details_harness|hendrycksTest-virology|5_2023-10-03T14-29-36.510142.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-10-03T14-29-36.510142.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-03T14-29-36.510142.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-10-03T14-29-36.510142.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-10-03T14-29-36.510142.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-10-03T14-29-36.510142.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-03T14-29-36.510142.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-10-03T14-29-36.510142.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-10-03T14-29-36.510142.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-10-03T14-29-36.510142.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-10-03T14-29-36.510142.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-10-03T14-29-36.510142.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-10-03T14-29-36.510142.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-10-03T14-29-36.510142.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-03T14-29-36.510142.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-10-03T14-29-36.510142.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-03T14-29-36.510142.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-03T14-29-36.510142.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-10-03T14-29-36.510142.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-10-03T14-29-36.510142.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-10-03T14-29-36.510142.parquet", 
"**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-03T14-29-36.510142.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-03T14-29-36.510142.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-03T14-29-36.510142.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-10-03T14-29-36.510142.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-03T14-29-36.510142.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-03T14-29-36.510142.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-03T14-29-36.510142.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-03T14-29-36.510142.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-10-03T14-29-36.510142.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-03T14-29-36.510142.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-03T14-29-36.510142.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-03T14-29-36.510142.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-03T14-29-36.510142.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-10-03T14-29-36.510142.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-10-03T14-29-36.510142.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-10-03T14-29-36.510142.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-10-03T14-29-36.510142.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-03T14-29-36.510142.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-10-03T14-29-36.510142.parquet", "**/details_harness|hendrycksTest-management|5_2023-10-03T14-29-36.510142.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-10-03T14-29-36.510142.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-10-03T14-29-36.510142.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-10-03T14-29-36.510142.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-10-03T14-29-36.510142.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-03T14-29-36.510142.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-10-03T14-29-36.510142.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-10-03T14-29-36.510142.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-10-03T14-29-36.510142.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-10-03T14-29-36.510142.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-10-03T14-29-36.510142.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-10-03T14-29-36.510142.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-10-03T14-29-36.510142.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-10-03T14-29-36.510142.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-10-03T14-29-36.510142.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-10-03T14-29-36.510142.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-03T14-29-36.510142.parquet", "**/details_harness|hendrycksTest-virology|5_2023-10-03T14-29-36.510142.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-10-03T14-29-36.510142.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", 
"data_files": [{"split": "2023_10_03T14_29_36.510142", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-03T14-29-36.510142.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-03T14-29-36.510142.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_10_03T14_29_36.510142", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-10-03T14-29-36.510142.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-10-03T14-29-36.510142.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_10_03T14_29_36.510142", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-10-03T14-29-36.510142.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-10-03T14-29-36.510142.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_10_03T14_29_36.510142", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-10-03T14-29-36.510142.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-10-03T14-29-36.510142.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_10_03T14_29_36.510142", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-03T14-29-36.510142.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-03T14-29-36.510142.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_10_03T14_29_36.510142", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-10-03T14-29-36.510142.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-10-03T14-29-36.510142.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_10_03T14_29_36.510142", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-10-03T14-29-36.510142.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-10-03T14-29-36.510142.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_10_03T14_29_36.510142", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-10-03T14-29-36.510142.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-10-03T14-29-36.510142.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_10_03T14_29_36.510142", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-10-03T14-29-36.510142.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-10-03T14-29-36.510142.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_10_03T14_29_36.510142", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-10-03T14-29-36.510142.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-10-03T14-29-36.510142.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_10_03T14_29_36.510142", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-10-03T14-29-36.510142.parquet"]}, {"split": "latest", 
"path": ["**/details_harness|hendrycksTest-college_physics|5_2023-10-03T14-29-36.510142.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_10_03T14_29_36.510142", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-10-03T14-29-36.510142.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-10-03T14-29-36.510142.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_10_03T14_29_36.510142", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-03T14-29-36.510142.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-03T14-29-36.510142.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_10_03T14_29_36.510142", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-10-03T14-29-36.510142.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-10-03T14-29-36.510142.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_10_03T14_29_36.510142", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-03T14-29-36.510142.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-03T14-29-36.510142.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_10_03T14_29_36.510142", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-03T14-29-36.510142.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-03T14-29-36.510142.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_10_03T14_29_36.510142", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-10-03T14-29-36.510142.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-10-03T14-29-36.510142.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_10_03T14_29_36.510142", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-10-03T14-29-36.510142.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-10-03T14-29-36.510142.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_10_03T14_29_36.510142", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-10-03T14-29-36.510142.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-10-03T14-29-36.510142.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_10_03T14_29_36.510142", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-03T14-29-36.510142.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-03T14-29-36.510142.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_10_03T14_29_36.510142", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-03T14-29-36.510142.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-03T14-29-36.510142.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_10_03T14_29_36.510142", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-03T14-29-36.510142.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-03T14-29-36.510142.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_10_03T14_29_36.510142", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-10-03T14-29-36.510142.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-10-03T14-29-36.510142.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_10_03T14_29_36.510142", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-03T14-29-36.510142.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-03T14-29-36.510142.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_10_03T14_29_36.510142", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-03T14-29-36.510142.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-03T14-29-36.510142.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_10_03T14_29_36.510142", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-03T14-29-36.510142.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-03T14-29-36.510142.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_10_03T14_29_36.510142", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-03T14-29-36.510142.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-03T14-29-36.510142.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_10_03T14_29_36.510142", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-10-03T14-29-36.510142.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-10-03T14-29-36.510142.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_10_03T14_29_36.510142", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-03T14-29-36.510142.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-03T14-29-36.510142.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_10_03T14_29_36.510142", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-03T14-29-36.510142.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-03T14-29-36.510142.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_10_03T14_29_36.510142", "path": 
["**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-03T14-29-36.510142.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-03T14-29-36.510142.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_10_03T14_29_36.510142", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-03T14-29-36.510142.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-03T14-29-36.510142.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_10_03T14_29_36.510142", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-10-03T14-29-36.510142.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-10-03T14-29-36.510142.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_10_03T14_29_36.510142", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-10-03T14-29-36.510142.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-10-03T14-29-36.510142.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_10_03T14_29_36.510142", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-10-03T14-29-36.510142.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-10-03T14-29-36.510142.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_10_03T14_29_36.510142", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-10-03T14-29-36.510142.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-10-03T14-29-36.510142.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_10_03T14_29_36.510142", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-03T14-29-36.510142.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-03T14-29-36.510142.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_10_03T14_29_36.510142", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-10-03T14-29-36.510142.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-10-03T14-29-36.510142.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_10_03T14_29_36.510142", "path": ["**/details_harness|hendrycksTest-management|5_2023-10-03T14-29-36.510142.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-10-03T14-29-36.510142.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_10_03T14_29_36.510142", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-10-03T14-29-36.510142.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-10-03T14-29-36.510142.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_10_03T14_29_36.510142", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-10-03T14-29-36.510142.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-medical_genetics|5_2023-10-03T14-29-36.510142.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_10_03T14_29_36.510142", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-10-03T14-29-36.510142.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-10-03T14-29-36.510142.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_10_03T14_29_36.510142", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-10-03T14-29-36.510142.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-10-03T14-29-36.510142.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_10_03T14_29_36.510142", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-03T14-29-36.510142.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-03T14-29-36.510142.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_10_03T14_29_36.510142", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-10-03T14-29-36.510142.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-10-03T14-29-36.510142.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_10_03T14_29_36.510142", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-10-03T14-29-36.510142.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-10-03T14-29-36.510142.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_10_03T14_29_36.510142", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-10-03T14-29-36.510142.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-10-03T14-29-36.510142.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_10_03T14_29_36.510142", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-10-03T14-29-36.510142.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-10-03T14-29-36.510142.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_10_03T14_29_36.510142", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-10-03T14-29-36.510142.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-10-03T14-29-36.510142.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_10_03T14_29_36.510142", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-10-03T14-29-36.510142.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-10-03T14-29-36.510142.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_10_03T14_29_36.510142", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-10-03T14-29-36.510142.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-10-03T14-29-36.510142.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": 
"2023_10_03T14_29_36.510142", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-10-03T14-29-36.510142.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-10-03T14-29-36.510142.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_10_03T14_29_36.510142", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-10-03T14-29-36.510142.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-10-03T14-29-36.510142.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_10_03T14_29_36.510142", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-10-03T14-29-36.510142.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-10-03T14-29-36.510142.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_10_03T14_29_36.510142", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-03T14-29-36.510142.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-03T14-29-36.510142.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_10_03T14_29_36.510142", "path": ["**/details_harness|hendrycksTest-virology|5_2023-10-03T14-29-36.510142.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-10-03T14-29-36.510142.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_10_03T14_29_36.510142", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-10-03T14-29-36.510142.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-10-03T14-29-36.510142.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_10_03T14_29_36.510142", "path": ["**/details_harness|truthfulqa:mc|0_2023-10-03T14-29-36.510142.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-10-03T14-29-36.510142.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_10_24T03_15_50.340712", "path": ["**/details_harness|winogrande|5_2023-10-24T03-15-50.340712.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-10-24T03-15-50.340712.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_10_03T14_29_36.510142", "path": ["results_2023-10-03T14-29-36.510142.parquet"]}, {"split": "2023_10_24T03_15_50.340712", "path": ["results_2023-10-24T03-15-50.340712.parquet"]}, {"split": "latest", "path": ["results_2023-10-24T03-15-50.340712.parquet"]}]}]}
2023-10-24T02:16:02+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of dfurman/llama-2-7b-instruct-peft ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model dfurman/llama-2-7b-instruct-peft on the Open LLM Leaderboard. The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-10-24T03:15:50.340712 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
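A minimal sketch of the loading step referenced in the summary above. The repo id follows the leaderboard's usual `details_<org>__<model>` naming and is an assumption here (the URLs in this record are masked); the config name `harness_winogrande_5` and the `latest` split are taken from this record's metadata:

```python
from datasets import load_dataset

# Minimal sketch, not confirmed by this record: the repo id is assumed from
# the leaderboard's usual "details_<org>__<model>" convention. The config
# "harness_winogrande_5" and the "latest" split appear in the metadata above
# (the card's summary calls the pointer to the newest results the "train"
# split, but the configs in this record name it "latest").
data = load_dataset(
    "open-llm-leaderboard/details_dfurman__llama-2-7b-instruct-peft",
    "harness_winogrande_5",
    split="latest",
)
```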
[ "# Dataset Card for Evaluation run of dfurman/llama-2-7b-instruct-peft", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model dfurman/llama-2-7b-instruct-peft on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-24T03:15:50.340712(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of dfurman/llama-2-7b-instruct-peft", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model dfurman/llama-2-7b-instruct-peft on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-24T03:15:50.340712(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 24, 31, 172, 67, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of dfurman/llama-2-7b-instruct-peft## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model dfurman/llama-2-7b-instruct-peft on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-10-24T03:15:50.340712(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
2c7dd7519771ee135fe3cae3393c902f17430fd0
# box * [data](https://huggingface.co/datasets/feynman-integrals-nn/box) * [model](https://huggingface.co/feynman-integrals-nn/box) * [source](https://gitlab.com/feynman-integrals-nn/feynman-integrals-nn/-/tree/main/box)
feynman-integrals-nn/box
[ "license:cc-by-4.0", "region:us" ]
2023-10-03T13:31:25+00:00
{"license": "cc-by-4.0"}
2023-11-14T20:55:30+00:00
[]
[]
TAGS #license-cc-by-4.0 #region-us
# box * data * model * source
[ "# box\n\n* data\n* model\n* source" ]
[ "TAGS\n#license-cc-by-4.0 #region-us \n", "# box\n\n* data\n* model\n* source" ]
[ 15, 8 ]
[ "passage: TAGS\n#license-cc-by-4.0 #region-us \n# box\n\n* data\n* model\n* source" ]
112760d993b140382844e383520571b178b64329
# Dataset Card for "general_instruction_with_reward_score_judged_by_13B_llama2" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
DialogueCharacter/english_general_instruction_with_reward_score_judged_by_13B_llama2
[ "region:us" ]
2023-10-03T13:34:05+00:00
{"dataset_info": {"features": [{"name": "input", "dtype": "string"}, {"name": "output", "dtype": "string"}, {"name": "reward_score", "dtype": "float64"}], "splits": [{"name": "train", "num_bytes": 3053305957, "num_examples": 1006809}], "download_size": 1633060464, "dataset_size": 3053305957}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]}
2023-10-29T03:49:25+00:00
[]
[]
TAGS #region-us
# Dataset Card for "general_instruction_with_reward_score_judged_by_13B_llama2" More Information needed
[ "# Dataset Card for \"general_instruction_with_reward_score_judged_by_13B_llama2\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"general_instruction_with_reward_score_judged_by_13B_llama2\"\n\nMore Information needed" ]
[ 6, 34 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"general_instruction_with_reward_score_judged_by_13B_llama2\"\n\nMore Information needed" ]
23032e9db65dc0d47844c5eda062854ad838cef4
# People with Guns Segmentation & Detection Dataset The dataset consists of photos depicting **individuals holding guns**. It specifically focuses on the **segmentation** of guns within these images and the **detection** of people holding guns. Each image in the dataset presents a different scenario, capturing individuals from various *backgrounds, genders, and age groups in different poses* while holding guns. The dataset is an essential resource for the development and evaluation of computer vision models and algorithms in fields related to *firearms recognition, security systems, law enforcement, and safety analysis*. ![](https://www.googleapis.com/download/storage/v1/b/kaggle-user-content/o/inbox%2F12421376%2F2497edebcdd1b7c4bc5471262bf5bd16%2FFrame%2029.png?generation=1696334547549518&alt=media) # Get the dataset ### This is just an example of the data Leave a request on [**https://trainingdata.pro/data-market**](https://trainingdata.pro/data-market?utm_source=huggingface&utm_medium=cpc&utm_campaign=people-with-guns-segmentation-and-detection) to discuss your requirements, learn about the price and buy the dataset. # Dataset structure - **images** - contains of original images with people holding guns - **labels** - includes visualized labeling created for the original images - **annotations.xml** - contains coordinates of the polygons and bounding boxes, created for the original photo # Data Format Each image from `images` folder is accompanied by an XML-annotation in the `annotations.xml` file indicating the coordinates of the bounding boxes and polygons. For each point, the x and y coordinates are provided. ### Сlasses: - **person**: person, who holds the gun, detected with a bounding box, - **gun**: gun, labeled with a polygon # Example of XML file structure ![](https://www.googleapis.com/download/storage/v1/b/kaggle-user-content/o/inbox%2F12421376%2F96bbe14c80f4b494f97136f8ffdbaa44%2Fcarbon.png?generation=1696335385101390&alt=media) # People with Guns Segmentation & Detection might be made in accordance with your requirements. ## **[TrainingData](https://trainingdata.pro/data-market?utm_source=huggingface&utm_medium=cpc&utm_campaign=people-with-guns-segmentation-and-detection)** provides high-quality data annotation tailored to your needs More datasets in TrainingData's Kaggle account: **https://www.kaggle.com/trainingdatapro/datasets** TrainingData's GitHub: **https://github.com/trainingdata-pro**
TrainingDataPro/people-with-guns-segmentation-and-detection
[ "task_categories:image-segmentation", "task_categories:object-detection", "language:en", "license:cc-by-nc-nd-4.0", "code", "finance", "legal", "region:us" ]
2023-10-03T13:47:31+00:00
{"language": ["en"], "license": "cc-by-nc-nd-4.0", "task_categories": ["image-segmentation", "object-detection"], "tags": ["code", "finance", "legal"], "dataset_info": {"config_name": "people-with-guns-segmentation-and-detection", "features": [{"name": "id", "dtype": "int32"}, {"name": "name", "dtype": "string"}, {"name": "image", "dtype": "image"}, {"name": "mask", "dtype": "image"}, {"name": "width", "dtype": "uint16"}, {"name": "height", "dtype": "uint16"}, {"name": "shapes", "sequence": [{"name": "label", "dtype": {"class_label": {"names": {"0": "person", "1": "gun"}}}}, {"name": "type", "dtype": "string"}, {"name": "points", "sequence": {"sequence": "float32"}}, {"name": "rotation", "dtype": "float32"}, {"name": "occluded", "dtype": "uint8"}, {"name": "z_order", "dtype": "int16"}, {"name": "attributes", "sequence": [{"name": "name", "dtype": "string"}, {"name": "text", "dtype": "string"}]}]}], "splits": [{"name": "train", "num_bytes": 42149, "num_examples": 11}], "download_size": 69561417, "dataset_size": 42149}}
2023-10-12T06:07:40+00:00
[]
[ "en" ]
TAGS #task_categories-image-segmentation #task_categories-object-detection #language-English #license-cc-by-nc-nd-4.0 #code #finance #legal #region-us
# People with Guns Segmentation & Detection Dataset The dataset consists of photos depicting individuals holding guns. It specifically focuses on the segmentation of guns within these images and the detection of people holding guns. Each image in the dataset presents a different scenario, capturing individuals from various *backgrounds, genders, and age groups in different poses* while holding guns. The dataset is an essential resource for the development and evaluation of computer vision models and algorithms in fields related to *firearms recognition, security systems, law enforcement, and safety analysis*. ![](URL # Get the dataset ### This is just an example of the data Leave a request on URL to discuss your requirements, learn about the price and buy the dataset. # Dataset structure - images - contains of original images with people holding guns - labels - includes visualized labeling created for the original images - URL - contains coordinates of the polygons and bounding boxes, created for the original photo # Data Format Each image from 'images' folder is accompanied by an XML-annotation in the 'URL' file indicating the coordinates of the bounding boxes and polygons. For each point, the x and y coordinates are provided. ### Сlasses: - person: person, who holds the gun, detected with a bounding box, - gun: gun, labeled with a polygon # Example of XML file structure ![](URL # People with Guns Segmentation & Detection might be made in accordance with your requirements. ## TrainingData provides high-quality data annotation tailored to your needs More datasets in TrainingData's Kaggle account: URL TrainingData's GitHub: URL
[ "# People with Guns Segmentation & Detection Dataset\nThe dataset consists of photos depicting individuals holding guns. It specifically focuses on the segmentation of guns within these images and the detection of people holding guns. \n\nEach image in the dataset presents a different scenario, capturing individuals from various *backgrounds, genders, and age groups in different poses* while holding guns. \n\nThe dataset is an essential resource for the development and evaluation of computer vision models and algorithms in fields related to *firearms recognition, security systems, law enforcement, and safety analysis*.\n\n![](URL", "# Get the dataset", "### This is just an example of the data\n\nLeave a request on URL to discuss your requirements, learn about the price and buy the dataset.", "# Dataset structure\n- images - contains of original images with people holding guns\n- labels - includes visualized labeling created for the original images\n- URL - contains coordinates of the polygons and bounding boxes, created for the original photo", "# Data Format\n\nEach image from 'images' folder is accompanied by an XML-annotation in the 'URL' file indicating the coordinates of the bounding boxes and polygons. For each point, the x and y coordinates are provided.", "### Сlasses:\n- person: person, who holds the gun, detected with a bounding box,\n- gun: gun, labeled with a polygon", "# Example of XML file structure\n\n![](URL", "# People with Guns Segmentation & Detection might be made in accordance with your requirements.", "## TrainingData provides high-quality data annotation tailored to your needs\n\nMore datasets in TrainingData's Kaggle account: URL\n\nTrainingData's GitHub: URL" ]
[ "TAGS\n#task_categories-image-segmentation #task_categories-object-detection #language-English #license-cc-by-nc-nd-4.0 #code #finance #legal #region-us \n", "# People with Guns Segmentation & Detection Dataset\nThe dataset consists of photos depicting individuals holding guns. It specifically focuses on the segmentation of guns within these images and the detection of people holding guns. \n\nEach image in the dataset presents a different scenario, capturing individuals from various *backgrounds, genders, and age groups in different poses* while holding guns. \n\nThe dataset is an essential resource for the development and evaluation of computer vision models and algorithms in fields related to *firearms recognition, security systems, law enforcement, and safety analysis*.\n\n![](URL", "# Get the dataset", "### This is just an example of the data\n\nLeave a request on URL to discuss your requirements, learn about the price and buy the dataset.", "# Dataset structure\n- images - contains of original images with people holding guns\n- labels - includes visualized labeling created for the original images\n- URL - contains coordinates of the polygons and bounding boxes, created for the original photo", "# Data Format\n\nEach image from 'images' folder is accompanied by an XML-annotation in the 'URL' file indicating the coordinates of the bounding boxes and polygons. For each point, the x and y coordinates are provided.", "### Сlasses:\n- person: person, who holds the gun, detected with a bounding box,\n- gun: gun, labeled with a polygon", "# Example of XML file structure\n\n![](URL", "# People with Guns Segmentation & Detection might be made in accordance with your requirements.", "## TrainingData provides high-quality data annotation tailored to your needs\n\nMore datasets in TrainingData's Kaggle account: URL\n\nTrainingData's GitHub: URL" ]
[ 53, 135, 5, 30, 54, 57, 36, 12, 20, 39 ]
[ "passage: TAGS\n#task_categories-image-segmentation #task_categories-object-detection #language-English #license-cc-by-nc-nd-4.0 #code #finance #legal #region-us \n# People with Guns Segmentation & Detection Dataset\nThe dataset consists of photos depicting individuals holding guns. It specifically focuses on the segmentation of guns within these images and the detection of people holding guns. \n\nEach image in the dataset presents a different scenario, capturing individuals from various *backgrounds, genders, and age groups in different poses* while holding guns. \n\nThe dataset is an essential resource for the development and evaluation of computer vision models and algorithms in fields related to *firearms recognition, security systems, law enforcement, and safety analysis*.\n\n![](URL# Get the dataset### This is just an example of the data\n\nLeave a request on URL to discuss your requirements, learn about the price and buy the dataset.# Dataset structure\n- images - contains of original images with people holding guns\n- labels - includes visualized labeling created for the original images\n- URL - contains coordinates of the polygons and bounding boxes, created for the original photo# Data Format\n\nEach image from 'images' folder is accompanied by an XML-annotation in the 'URL' file indicating the coordinates of the bounding boxes and polygons. For each point, the x and y coordinates are provided.### Сlasses:\n- person: person, who holds the gun, detected with a bounding box,\n- gun: gun, labeled with a polygon# Example of XML file structure\n\n![](URL# People with Guns Segmentation & Detection might be made in accordance with your requirements.## TrainingData provides high-quality data annotation tailored to your needs\n\nMore datasets in TrainingData's Kaggle account: URL\n\nTrainingData's GitHub: URL" ]
7b74806fb506e97e76c49290674e8b2fc99849c3
# Dataset Card for "social_media_dataset" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
kevinknights29/social_media_dataset
[ "region:us" ]
2023-10-03T13:47:43+00:00
{"configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "eval", "path": "data/eval-*"}]}], "dataset_info": {"features": [{"name": "product_information", "dtype": "string"}, {"name": "topic", "dtype": "string"}, {"name": "social_media_content", "dtype": "string"}, {"name": "text", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 20899, "num_examples": 5}, {"name": "eval", "num_bytes": 3017, "num_examples": 1}], "download_size": 61424, "dataset_size": 23916}}
2023-10-03T13:47:46+00:00
[]
[]
TAGS #region-us
# Dataset Card for "social_media_dataset" More Information needed
[ "# Dataset Card for \"social_media_dataset\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"social_media_dataset\"\n\nMore Information needed" ]
[ 6, 16 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"social_media_dataset\"\n\nMore Information needed" ]
1e388b2a453d76b8f1b92ff0566d3e44c359ea9c
# Dataset Card for "SampleDataset" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
asgaardlab/SampleDataset
[ "region:us" ]
2023-10-03T13:49:43+00:00
{"dataset_info": {"features": [{"name": "Buggy Image", "dtype": "image"}, {"name": "Correct Image", "dtype": "image"}, {"name": "Segmentation Image (Bug)", "dtype": "image"}, {"name": "Segmentation Image (Correct)", "dtype": "image"}, {"name": "Description", "dtype": "string"}, {"name": "Tag", "dtype": "string"}, {"name": "Objects JSON (Bug)", "dtype": "string"}, {"name": "Objects JSON (Correct)", "dtype": "string"}, {"name": "Victim Name", "dtype": "string"}, {"name": "Victim Color", "sequence": "int64"}], "splits": [{"name": "validation", "num_bytes": 44919090.0, "num_examples": 70}], "download_size": 43196423, "dataset_size": 44919090.0}, "configs": [{"config_name": "default", "data_files": [{"split": "validation", "path": "data/validation-*"}]}]}
2023-10-16T16:47:13+00:00
[]
[]
TAGS #region-us
# Dataset Card for "SampleDataset" More Information needed
[ "# Dataset Card for \"SampleDataset\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"SampleDataset\"\n\nMore Information needed" ]
[ 6, 14 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"SampleDataset\"\n\nMore Information needed" ]
92c1d5eac04b585710256844d8b720f144ea3397
# Dataset Card for Evaluation run of PulsarAI/MythoMax-L2-LoRA-Assemble-13B ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/PulsarAI/MythoMax-L2-LoRA-Assemble-13B - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [PulsarAI/MythoMax-L2-LoRA-Assemble-13B](https://huggingface.co/PulsarAI/MythoMax-L2-LoRA-Assemble-13B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 61 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_PulsarAI__MythoMax-L2-LoRA-Assemble-13B", "harness_truthfulqa_mc_0", split="train") ``` ## Latest results These are the [latest results from run 2023-10-03T14:58:01.778055](https://huggingface.co/datasets/open-llm-leaderboard/details_PulsarAI__MythoMax-L2-LoRA-Assemble-13B/blob/main/results_2023-10-03T14-58-01.778055.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.598938175511998, "acc_stderr": 0.03385413189247629, "acc_norm": 0.6028583107012461, "acc_norm_stderr": 0.03383158640553202, "mc1": 0.40514075887392903, "mc1_stderr": 0.01718561172775337, "mc2": 0.5594181501740189, "mc2_stderr": 0.015699414732693026 }, "harness|arc:challenge|25": { "acc": 0.6040955631399317, "acc_stderr": 0.014291228393536587, "acc_norm": 0.636518771331058, "acc_norm_stderr": 0.014056207319068283 }, "harness|hellaswag|10": { "acc": 0.6358295160326628, "acc_stderr": 0.004802133511654241, "acc_norm": 0.8346942840071699, "acc_norm_stderr": 0.003706970856410953 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.5185185185185185, "acc_stderr": 0.043163785995113245, "acc_norm": 0.5185185185185185, "acc_norm_stderr": 0.043163785995113245 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.618421052631579, "acc_stderr": 0.03953173377749194, "acc_norm": 0.618421052631579, "acc_norm_stderr": 0.03953173377749194 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.57, "acc_stderr": 0.049756985195624284, "acc_norm": 0.57, "acc_norm_stderr": 0.049756985195624284 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.6188679245283019, "acc_stderr": 0.029890609686286637, "acc_norm": 0.6188679245283019, "acc_norm_stderr": 0.029890609686286637 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.6597222222222222, "acc_stderr": 0.039621355734862175, "acc_norm": 0.6597222222222222, "acc_norm_stderr": 0.039621355734862175 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.41,
"acc_stderr": 0.049431107042371025, "acc_norm": 0.41, "acc_norm_stderr": 0.049431107042371025 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.54, "acc_stderr": 0.05009082659620333, "acc_norm": 0.54, "acc_norm_stderr": 0.05009082659620333 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.35, "acc_stderr": 0.047937248544110196, "acc_norm": 0.35, "acc_norm_stderr": 0.047937248544110196 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.6011560693641619, "acc_stderr": 0.037336266553835096, "acc_norm": 0.6011560693641619, "acc_norm_stderr": 0.037336266553835096 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.3333333333333333, "acc_stderr": 0.04690650298201942, "acc_norm": 0.3333333333333333, "acc_norm_stderr": 0.04690650298201942 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.69, "acc_stderr": 0.04648231987117316, "acc_norm": 0.69, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.4978723404255319, "acc_stderr": 0.03268572658667492, "acc_norm": 0.4978723404255319, "acc_norm_stderr": 0.03268572658667492 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.35964912280701755, "acc_stderr": 0.04514496132873634, "acc_norm": 0.35964912280701755, "acc_norm_stderr": 0.04514496132873634 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5724137931034483, "acc_stderr": 0.04122737111370333, "acc_norm": 0.5724137931034483, "acc_norm_stderr": 0.04122737111370333 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.3492063492063492, "acc_stderr": 0.024552292209342658, "acc_norm": 0.3492063492063492, "acc_norm_stderr": 0.024552292209342658 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.36507936507936506, "acc_stderr": 0.043062412591271526, "acc_norm": 0.36507936507936506, "acc_norm_stderr": 0.043062412591271526 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.39, "acc_stderr": 0.04902071300001975, "acc_norm": 0.39, "acc_norm_stderr": 0.04902071300001975 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.667741935483871, "acc_stderr": 0.0267955608481228, "acc_norm": 0.667741935483871, "acc_norm_stderr": 0.0267955608481228 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.4876847290640394, "acc_stderr": 0.035169204442208966, "acc_norm": 0.4876847290640394, "acc_norm_stderr": 0.035169204442208966 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.62, "acc_stderr": 0.04878317312145632, "acc_norm": 0.62, "acc_norm_stderr": 0.04878317312145632 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.7272727272727273, "acc_stderr": 0.0347769116216366, "acc_norm": 0.7272727272727273, "acc_norm_stderr": 0.0347769116216366 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.7777777777777778, "acc_stderr": 0.029620227874790482, "acc_norm": 0.7777777777777778, "acc_norm_stderr": 0.029620227874790482 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.8808290155440415, "acc_stderr": 0.02338193534812143, "acc_norm": 0.8808290155440415, "acc_norm_stderr": 0.02338193534812143 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.6205128205128205, "acc_stderr": 0.02460362692409742, "acc_norm": 0.6205128205128205, "acc_norm_stderr": 0.02460362692409742 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.32222222222222224, "acc_stderr": 0.028493465091028597, "acc_norm": 0.32222222222222224, "acc_norm_stderr": 0.028493465091028597 }, 
"harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.5966386554621849, "acc_stderr": 0.031866081214088314, "acc_norm": 0.5966386554621849, "acc_norm_stderr": 0.031866081214088314 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.304635761589404, "acc_stderr": 0.03757949922943342, "acc_norm": 0.304635761589404, "acc_norm_stderr": 0.03757949922943342 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.7944954128440367, "acc_stderr": 0.01732435232501602, "acc_norm": 0.7944954128440367, "acc_norm_stderr": 0.01732435232501602 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.42592592592592593, "acc_stderr": 0.033723432716530645, "acc_norm": 0.42592592592592593, "acc_norm_stderr": 0.033723432716530645 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.8333333333333334, "acc_stderr": 0.02615686752393104, "acc_norm": 0.8333333333333334, "acc_norm_stderr": 0.02615686752393104 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.7848101265822784, "acc_stderr": 0.02675082699467617, "acc_norm": 0.7848101265822784, "acc_norm_stderr": 0.02675082699467617 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.6860986547085202, "acc_stderr": 0.03114679648297246, "acc_norm": 0.6860986547085202, "acc_norm_stderr": 0.03114679648297246 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.6793893129770993, "acc_stderr": 0.04093329229834278, "acc_norm": 0.6793893129770993, "acc_norm_stderr": 0.04093329229834278 }, "harness|hendrycksTest-international_law|5": { "acc": 0.7272727272727273, "acc_stderr": 0.04065578140908706, "acc_norm": 0.7272727272727273, "acc_norm_stderr": 0.04065578140908706 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.7685185185185185, "acc_stderr": 0.04077494709252627, "acc_norm": 0.7685185185185185, "acc_norm_stderr": 0.04077494709252627 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.6993865030674846, "acc_stderr": 0.03602511318806771, "acc_norm": 0.6993865030674846, "acc_norm_stderr": 0.03602511318806771 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.4017857142857143, "acc_stderr": 0.04653333146973646, "acc_norm": 0.4017857142857143, "acc_norm_stderr": 0.04653333146973646 }, "harness|hendrycksTest-management|5": { "acc": 0.7572815533980582, "acc_stderr": 0.04245022486384495, "acc_norm": 0.7572815533980582, "acc_norm_stderr": 0.04245022486384495 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8461538461538461, "acc_stderr": 0.023636873317489294, "acc_norm": 0.8461538461538461, "acc_norm_stderr": 0.023636873317489294 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.59, "acc_stderr": 0.049431107042371025, "acc_norm": 0.59, "acc_norm_stderr": 0.049431107042371025 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.7956577266922095, "acc_stderr": 0.0144191239809319, "acc_norm": 0.7956577266922095, "acc_norm_stderr": 0.0144191239809319 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.661849710982659, "acc_stderr": 0.025469770149400172, "acc_norm": 0.661849710982659, "acc_norm_stderr": 0.025469770149400172 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.48044692737430167, "acc_stderr": 0.016709709877661995, "acc_norm": 0.48044692737430167, "acc_norm_stderr": 0.016709709877661995 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.6699346405228758, "acc_stderr": 0.026925654653615693, "acc_norm": 0.6699346405228758, "acc_norm_stderr": 0.026925654653615693 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.6881028938906752, "acc_stderr": 0.026311858071854155, 
"acc_norm": 0.6881028938906752, "acc_norm_stderr": 0.026311858071854155 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.7067901234567902, "acc_stderr": 0.025329888171900922, "acc_norm": 0.7067901234567902, "acc_norm_stderr": 0.025329888171900922 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.46808510638297873, "acc_stderr": 0.029766675075873866, "acc_norm": 0.46808510638297873, "acc_norm_stderr": 0.029766675075873866 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.46088657105606257, "acc_stderr": 0.012731102790504526, "acc_norm": 0.46088657105606257, "acc_norm_stderr": 0.012731102790504526 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.6102941176470589, "acc_stderr": 0.0296246635811597, "acc_norm": 0.6102941176470589, "acc_norm_stderr": 0.0296246635811597 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.5898692810457516, "acc_stderr": 0.019898412717635903, "acc_norm": 0.5898692810457516, "acc_norm_stderr": 0.019898412717635903 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6636363636363637, "acc_stderr": 0.04525393596302505, "acc_norm": 0.6636363636363637, "acc_norm_stderr": 0.04525393596302505 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.6775510204081633, "acc_stderr": 0.02992310056368391, "acc_norm": 0.6775510204081633, "acc_norm_stderr": 0.02992310056368391 }, "harness|hendrycksTest-sociology|5": { "acc": 0.7611940298507462, "acc_stderr": 0.03014777593540922, "acc_norm": 0.7611940298507462, "acc_norm_stderr": 0.03014777593540922 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.83, "acc_stderr": 0.0377525168068637, "acc_norm": 0.83, "acc_norm_stderr": 0.0377525168068637 }, "harness|hendrycksTest-virology|5": { "acc": 0.4879518072289157, "acc_stderr": 0.03891364495835821, "acc_norm": 0.4879518072289157, "acc_norm_stderr": 0.03891364495835821 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8011695906432749, "acc_stderr": 0.030611116557432528, "acc_norm": 0.8011695906432749, "acc_norm_stderr": 0.030611116557432528 }, "harness|truthfulqa:mc|0": { "mc1": 0.40514075887392903, "mc1_stderr": 0.01718561172775337, "mc2": 0.5594181501740189, "mc2_stderr": 0.015699414732693026 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
open-llm-leaderboard/details_PulsarAI__MythoMax-L2-LoRA-Assemble-13B
[ "region:us" ]
2023-10-03T13:58:25+00:00
{"pretty_name": "Evaluation run of PulsarAI/MythoMax-L2-LoRA-Assemble-13B", "dataset_summary": "Dataset automatically created during the evaluation run of model [PulsarAI/MythoMax-L2-LoRA-Assemble-13B](https://huggingface.co/PulsarAI/MythoMax-L2-LoRA-Assemble-13B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 61 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_PulsarAI__MythoMax-L2-LoRA-Assemble-13B\",\n\t\"harness_truthfulqa_mc_0\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-10-03T14:58:01.778055](https://huggingface.co/datasets/open-llm-leaderboard/details_PulsarAI__MythoMax-L2-LoRA-Assemble-13B/blob/main/results_2023-10-03T14-58-01.778055.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.598938175511998,\n \"acc_stderr\": 0.03385413189247629,\n \"acc_norm\": 0.6028583107012461,\n \"acc_norm_stderr\": 0.03383158640553202,\n \"mc1\": 0.40514075887392903,\n \"mc1_stderr\": 0.01718561172775337,\n \"mc2\": 0.5594181501740189,\n \"mc2_stderr\": 0.015699414732693026\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.6040955631399317,\n \"acc_stderr\": 0.014291228393536587,\n \"acc_norm\": 0.636518771331058,\n \"acc_norm_stderr\": 0.014056207319068283\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6358295160326628,\n \"acc_stderr\": 0.004802133511654241,\n \"acc_norm\": 0.8346942840071699,\n \"acc_norm_stderr\": 0.003706970856410953\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.3,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.3,\n \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.5185185185185185,\n \"acc_stderr\": 0.043163785995113245,\n \"acc_norm\": 0.5185185185185185,\n \"acc_norm_stderr\": 0.043163785995113245\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.618421052631579,\n \"acc_stderr\": 0.03953173377749194,\n \"acc_norm\": 0.618421052631579,\n \"acc_norm_stderr\": 0.03953173377749194\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.57,\n \"acc_stderr\": 0.049756985195624284,\n \"acc_norm\": 0.57,\n \"acc_norm_stderr\": 0.049756985195624284\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.6188679245283019,\n \"acc_stderr\": 0.029890609686286637,\n \"acc_norm\": 0.6188679245283019,\n \"acc_norm_stderr\": 0.029890609686286637\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.6597222222222222,\n \"acc_stderr\": 0.039621355734862175,\n \"acc_norm\": 0.6597222222222222,\n \"acc_norm_stderr\": 0.039621355734862175\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.41,\n 
\"acc_stderr\": 0.049431107042371025,\n \"acc_norm\": 0.41,\n \"acc_norm_stderr\": 0.049431107042371025\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.54,\n \"acc_stderr\": 0.05009082659620333,\n \"acc_norm\": 0.54,\n \"acc_norm_stderr\": 0.05009082659620333\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.35,\n \"acc_stderr\": 0.047937248544110196,\n \"acc_norm\": 0.35,\n \"acc_norm_stderr\": 0.047937248544110196\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6011560693641619,\n \"acc_stderr\": 0.037336266553835096,\n \"acc_norm\": 0.6011560693641619,\n \"acc_norm_stderr\": 0.037336266553835096\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.3333333333333333,\n \"acc_stderr\": 0.04690650298201942,\n \"acc_norm\": 0.3333333333333333,\n \"acc_norm_stderr\": 0.04690650298201942\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.69,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.69,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.4978723404255319,\n \"acc_stderr\": 0.03268572658667492,\n \"acc_norm\": 0.4978723404255319,\n \"acc_norm_stderr\": 0.03268572658667492\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.35964912280701755,\n \"acc_stderr\": 0.04514496132873634,\n \"acc_norm\": 0.35964912280701755,\n \"acc_norm_stderr\": 0.04514496132873634\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5724137931034483,\n \"acc_stderr\": 0.04122737111370333,\n \"acc_norm\": 0.5724137931034483,\n \"acc_norm_stderr\": 0.04122737111370333\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.3492063492063492,\n \"acc_stderr\": 0.024552292209342658,\n \"acc_norm\": 0.3492063492063492,\n \"acc_norm_stderr\": 0.024552292209342658\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.36507936507936506,\n \"acc_stderr\": 0.043062412591271526,\n \"acc_norm\": 0.36507936507936506,\n \"acc_norm_stderr\": 0.043062412591271526\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.39,\n \"acc_stderr\": 0.04902071300001975,\n \"acc_norm\": 0.39,\n \"acc_norm_stderr\": 0.04902071300001975\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.667741935483871,\n \"acc_stderr\": 0.0267955608481228,\n \"acc_norm\": 0.667741935483871,\n \"acc_norm_stderr\": 0.0267955608481228\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.4876847290640394,\n \"acc_stderr\": 0.035169204442208966,\n \"acc_norm\": 0.4876847290640394,\n \"acc_norm_stderr\": 0.035169204442208966\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.62,\n \"acc_stderr\": 0.04878317312145632,\n \"acc_norm\": 0.62,\n \"acc_norm_stderr\": 0.04878317312145632\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7272727272727273,\n \"acc_stderr\": 0.0347769116216366,\n \"acc_norm\": 0.7272727272727273,\n \"acc_norm_stderr\": 0.0347769116216366\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.7777777777777778,\n \"acc_stderr\": 0.029620227874790482,\n \"acc_norm\": 0.7777777777777778,\n \"acc_norm_stderr\": 0.029620227874790482\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.8808290155440415,\n \"acc_stderr\": 0.02338193534812143,\n \"acc_norm\": 0.8808290155440415,\n \"acc_norm_stderr\": 0.02338193534812143\n },\n 
\"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.6205128205128205,\n \"acc_stderr\": 0.02460362692409742,\n \"acc_norm\": 0.6205128205128205,\n \"acc_norm_stderr\": 0.02460362692409742\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.32222222222222224,\n \"acc_stderr\": 0.028493465091028597,\n \"acc_norm\": 0.32222222222222224,\n \"acc_norm_stderr\": 0.028493465091028597\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.5966386554621849,\n \"acc_stderr\": 0.031866081214088314,\n \"acc_norm\": 0.5966386554621849,\n \"acc_norm_stderr\": 0.031866081214088314\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.304635761589404,\n \"acc_stderr\": 0.03757949922943342,\n \"acc_norm\": 0.304635761589404,\n \"acc_norm_stderr\": 0.03757949922943342\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.7944954128440367,\n \"acc_stderr\": 0.01732435232501602,\n \"acc_norm\": 0.7944954128440367,\n \"acc_norm_stderr\": 0.01732435232501602\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.42592592592592593,\n \"acc_stderr\": 0.033723432716530645,\n \"acc_norm\": 0.42592592592592593,\n \"acc_norm_stderr\": 0.033723432716530645\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.8333333333333334,\n \"acc_stderr\": 0.02615686752393104,\n \"acc_norm\": 0.8333333333333334,\n \"acc_norm_stderr\": 0.02615686752393104\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.7848101265822784,\n \"acc_stderr\": 0.02675082699467617,\n \"acc_norm\": 0.7848101265822784,\n \"acc_norm_stderr\": 0.02675082699467617\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6860986547085202,\n \"acc_stderr\": 0.03114679648297246,\n \"acc_norm\": 0.6860986547085202,\n \"acc_norm_stderr\": 0.03114679648297246\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.6793893129770993,\n \"acc_stderr\": 0.04093329229834278,\n \"acc_norm\": 0.6793893129770993,\n \"acc_norm_stderr\": 0.04093329229834278\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.7272727272727273,\n \"acc_stderr\": 0.04065578140908706,\n \"acc_norm\": 0.7272727272727273,\n \"acc_norm_stderr\": 0.04065578140908706\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7685185185185185,\n \"acc_stderr\": 0.04077494709252627,\n \"acc_norm\": 0.7685185185185185,\n \"acc_norm_stderr\": 0.04077494709252627\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.6993865030674846,\n \"acc_stderr\": 0.03602511318806771,\n \"acc_norm\": 0.6993865030674846,\n \"acc_norm_stderr\": 0.03602511318806771\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.4017857142857143,\n \"acc_stderr\": 0.04653333146973646,\n \"acc_norm\": 0.4017857142857143,\n \"acc_norm_stderr\": 0.04653333146973646\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7572815533980582,\n \"acc_stderr\": 0.04245022486384495,\n \"acc_norm\": 0.7572815533980582,\n \"acc_norm_stderr\": 0.04245022486384495\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8461538461538461,\n \"acc_stderr\": 0.023636873317489294,\n \"acc_norm\": 0.8461538461538461,\n \"acc_norm_stderr\": 0.023636873317489294\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.59,\n \"acc_stderr\": 0.049431107042371025,\n \"acc_norm\": 0.59,\n \"acc_norm_stderr\": 0.049431107042371025\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 
0.7956577266922095,\n \"acc_stderr\": 0.0144191239809319,\n \"acc_norm\": 0.7956577266922095,\n \"acc_norm_stderr\": 0.0144191239809319\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.661849710982659,\n \"acc_stderr\": 0.025469770149400172,\n \"acc_norm\": 0.661849710982659,\n \"acc_norm_stderr\": 0.025469770149400172\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.48044692737430167,\n \"acc_stderr\": 0.016709709877661995,\n \"acc_norm\": 0.48044692737430167,\n \"acc_norm_stderr\": 0.016709709877661995\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.6699346405228758,\n \"acc_stderr\": 0.026925654653615693,\n \"acc_norm\": 0.6699346405228758,\n \"acc_norm_stderr\": 0.026925654653615693\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.6881028938906752,\n \"acc_stderr\": 0.026311858071854155,\n \"acc_norm\": 0.6881028938906752,\n \"acc_norm_stderr\": 0.026311858071854155\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.7067901234567902,\n \"acc_stderr\": 0.025329888171900922,\n \"acc_norm\": 0.7067901234567902,\n \"acc_norm_stderr\": 0.025329888171900922\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.46808510638297873,\n \"acc_stderr\": 0.029766675075873866,\n \"acc_norm\": 0.46808510638297873,\n \"acc_norm_stderr\": 0.029766675075873866\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.46088657105606257,\n \"acc_stderr\": 0.012731102790504526,\n \"acc_norm\": 0.46088657105606257,\n \"acc_norm_stderr\": 0.012731102790504526\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.6102941176470589,\n \"acc_stderr\": 0.0296246635811597,\n \"acc_norm\": 0.6102941176470589,\n \"acc_norm_stderr\": 0.0296246635811597\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.5898692810457516,\n \"acc_stderr\": 0.019898412717635903,\n \"acc_norm\": 0.5898692810457516,\n \"acc_norm_stderr\": 0.019898412717635903\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6636363636363637,\n \"acc_stderr\": 0.04525393596302505,\n \"acc_norm\": 0.6636363636363637,\n \"acc_norm_stderr\": 0.04525393596302505\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.6775510204081633,\n \"acc_stderr\": 0.02992310056368391,\n \"acc_norm\": 0.6775510204081633,\n \"acc_norm_stderr\": 0.02992310056368391\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.7611940298507462,\n \"acc_stderr\": 0.03014777593540922,\n \"acc_norm\": 0.7611940298507462,\n \"acc_norm_stderr\": 0.03014777593540922\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.83,\n \"acc_stderr\": 0.0377525168068637,\n \"acc_norm\": 0.83,\n \"acc_norm_stderr\": 0.0377525168068637\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.4879518072289157,\n \"acc_stderr\": 0.03891364495835821,\n \"acc_norm\": 0.4879518072289157,\n \"acc_norm_stderr\": 0.03891364495835821\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8011695906432749,\n \"acc_stderr\": 0.030611116557432528,\n \"acc_norm\": 0.8011695906432749,\n \"acc_norm_stderr\": 0.030611116557432528\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.40514075887392903,\n \"mc1_stderr\": 0.01718561172775337,\n \"mc2\": 0.5594181501740189,\n \"mc2_stderr\": 0.015699414732693026\n }\n}\n```", "repo_url": "https://huggingface.co/PulsarAI/MythoMax-L2-LoRA-Assemble-13B", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email 
protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_10_03T14_58_01.778055", "path": ["**/details_harness|arc:challenge|25_2023-10-03T14-58-01.778055.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-10-03T14-58-01.778055.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_10_03T14_58_01.778055", "path": ["**/details_harness|hellaswag|10_2023-10-03T14-58-01.778055.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-10-03T14-58-01.778055.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_10_03T14_58_01.778055", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-03T14-58-01.778055.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-10-03T14-58-01.778055.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-10-03T14-58-01.778055.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-10-03T14-58-01.778055.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-03T14-58-01.778055.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-10-03T14-58-01.778055.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-10-03T14-58-01.778055.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-10-03T14-58-01.778055.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-10-03T14-58-01.778055.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-10-03T14-58-01.778055.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-10-03T14-58-01.778055.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-10-03T14-58-01.778055.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-03T14-58-01.778055.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-10-03T14-58-01.778055.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-03T14-58-01.778055.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-03T14-58-01.778055.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-10-03T14-58-01.778055.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-10-03T14-58-01.778055.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-10-03T14-58-01.778055.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-03T14-58-01.778055.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-03T14-58-01.778055.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-03T14-58-01.778055.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-10-03T14-58-01.778055.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-03T14-58-01.778055.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-03T14-58-01.778055.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-03T14-58-01.778055.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-03T14-58-01.778055.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-10-03T14-58-01.778055.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-03T14-58-01.778055.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-03T14-58-01.778055.parquet", 
"**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-03T14-58-01.778055.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-03T14-58-01.778055.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-10-03T14-58-01.778055.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-10-03T14-58-01.778055.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-10-03T14-58-01.778055.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-10-03T14-58-01.778055.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-03T14-58-01.778055.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-10-03T14-58-01.778055.parquet", "**/details_harness|hendrycksTest-management|5_2023-10-03T14-58-01.778055.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-10-03T14-58-01.778055.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-10-03T14-58-01.778055.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-10-03T14-58-01.778055.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-10-03T14-58-01.778055.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-03T14-58-01.778055.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-10-03T14-58-01.778055.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-10-03T14-58-01.778055.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-10-03T14-58-01.778055.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-10-03T14-58-01.778055.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-10-03T14-58-01.778055.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-10-03T14-58-01.778055.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-10-03T14-58-01.778055.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-10-03T14-58-01.778055.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-10-03T14-58-01.778055.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-10-03T14-58-01.778055.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-03T14-58-01.778055.parquet", "**/details_harness|hendrycksTest-virology|5_2023-10-03T14-58-01.778055.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-10-03T14-58-01.778055.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-03T14-58-01.778055.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-10-03T14-58-01.778055.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-10-03T14-58-01.778055.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-10-03T14-58-01.778055.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-03T14-58-01.778055.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-10-03T14-58-01.778055.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-10-03T14-58-01.778055.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-10-03T14-58-01.778055.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-10-03T14-58-01.778055.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-10-03T14-58-01.778055.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-10-03T14-58-01.778055.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-10-03T14-58-01.778055.parquet", 
"**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-03T14-58-01.778055.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-10-03T14-58-01.778055.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-03T14-58-01.778055.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-03T14-58-01.778055.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-10-03T14-58-01.778055.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-10-03T14-58-01.778055.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-10-03T14-58-01.778055.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-03T14-58-01.778055.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-03T14-58-01.778055.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-03T14-58-01.778055.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-10-03T14-58-01.778055.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-03T14-58-01.778055.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-03T14-58-01.778055.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-03T14-58-01.778055.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-03T14-58-01.778055.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-10-03T14-58-01.778055.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-03T14-58-01.778055.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-03T14-58-01.778055.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-03T14-58-01.778055.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-03T14-58-01.778055.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-10-03T14-58-01.778055.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-10-03T14-58-01.778055.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-10-03T14-58-01.778055.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-10-03T14-58-01.778055.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-03T14-58-01.778055.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-10-03T14-58-01.778055.parquet", "**/details_harness|hendrycksTest-management|5_2023-10-03T14-58-01.778055.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-10-03T14-58-01.778055.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-10-03T14-58-01.778055.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-10-03T14-58-01.778055.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-10-03T14-58-01.778055.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-03T14-58-01.778055.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-10-03T14-58-01.778055.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-10-03T14-58-01.778055.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-10-03T14-58-01.778055.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-10-03T14-58-01.778055.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-10-03T14-58-01.778055.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-10-03T14-58-01.778055.parquet", 
"**/details_harness|hendrycksTest-professional_psychology|5_2023-10-03T14-58-01.778055.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-10-03T14-58-01.778055.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-10-03T14-58-01.778055.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-10-03T14-58-01.778055.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-03T14-58-01.778055.parquet", "**/details_harness|hendrycksTest-virology|5_2023-10-03T14-58-01.778055.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-10-03T14-58-01.778055.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_10_03T14_58_01.778055", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-03T14-58-01.778055.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-03T14-58-01.778055.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_10_03T14_58_01.778055", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-10-03T14-58-01.778055.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-10-03T14-58-01.778055.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_10_03T14_58_01.778055", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-10-03T14-58-01.778055.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-10-03T14-58-01.778055.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_10_03T14_58_01.778055", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-10-03T14-58-01.778055.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-10-03T14-58-01.778055.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_10_03T14_58_01.778055", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-03T14-58-01.778055.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-03T14-58-01.778055.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_10_03T14_58_01.778055", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-10-03T14-58-01.778055.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-10-03T14-58-01.778055.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_10_03T14_58_01.778055", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-10-03T14-58-01.778055.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-10-03T14-58-01.778055.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_10_03T14_58_01.778055", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-10-03T14-58-01.778055.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-10-03T14-58-01.778055.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_10_03T14_58_01.778055", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-10-03T14-58-01.778055.parquet"]}, {"split": 
"latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-10-03T14-58-01.778055.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_10_03T14_58_01.778055", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-10-03T14-58-01.778055.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-10-03T14-58-01.778055.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_10_03T14_58_01.778055", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-10-03T14-58-01.778055.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-10-03T14-58-01.778055.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_10_03T14_58_01.778055", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-10-03T14-58-01.778055.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-10-03T14-58-01.778055.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_10_03T14_58_01.778055", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-03T14-58-01.778055.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-03T14-58-01.778055.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_10_03T14_58_01.778055", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-10-03T14-58-01.778055.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-10-03T14-58-01.778055.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_10_03T14_58_01.778055", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-03T14-58-01.778055.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-03T14-58-01.778055.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_10_03T14_58_01.778055", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-03T14-58-01.778055.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-03T14-58-01.778055.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_10_03T14_58_01.778055", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-10-03T14-58-01.778055.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-10-03T14-58-01.778055.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_10_03T14_58_01.778055", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-10-03T14-58-01.778055.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-10-03T14-58-01.778055.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_10_03T14_58_01.778055", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-10-03T14-58-01.778055.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-10-03T14-58-01.778055.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_10_03T14_58_01.778055", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-03T14-58-01.778055.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-03T14-58-01.778055.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_10_03T14_58_01.778055", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-03T14-58-01.778055.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-03T14-58-01.778055.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_10_03T14_58_01.778055", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-03T14-58-01.778055.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-03T14-58-01.778055.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_10_03T14_58_01.778055", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-10-03T14-58-01.778055.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-10-03T14-58-01.778055.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_10_03T14_58_01.778055", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-03T14-58-01.778055.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-03T14-58-01.778055.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_10_03T14_58_01.778055", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-03T14-58-01.778055.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-03T14-58-01.778055.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_10_03T14_58_01.778055", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-03T14-58-01.778055.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-03T14-58-01.778055.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_10_03T14_58_01.778055", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-03T14-58-01.778055.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-03T14-58-01.778055.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_10_03T14_58_01.778055", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-10-03T14-58-01.778055.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-10-03T14-58-01.778055.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_10_03T14_58_01.778055", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-03T14-58-01.778055.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-03T14-58-01.778055.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_10_03T14_58_01.778055", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-03T14-58-01.778055.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-03T14-58-01.778055.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_10_03T14_58_01.778055", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-03T14-58-01.778055.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-03T14-58-01.778055.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_10_03T14_58_01.778055", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-03T14-58-01.778055.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-03T14-58-01.778055.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_10_03T14_58_01.778055", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-10-03T14-58-01.778055.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-10-03T14-58-01.778055.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_10_03T14_58_01.778055", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-10-03T14-58-01.778055.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-10-03T14-58-01.778055.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_10_03T14_58_01.778055", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-10-03T14-58-01.778055.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-10-03T14-58-01.778055.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_10_03T14_58_01.778055", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-10-03T14-58-01.778055.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-10-03T14-58-01.778055.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_10_03T14_58_01.778055", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-03T14-58-01.778055.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-03T14-58-01.778055.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_10_03T14_58_01.778055", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-10-03T14-58-01.778055.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-10-03T14-58-01.778055.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_10_03T14_58_01.778055", "path": ["**/details_harness|hendrycksTest-management|5_2023-10-03T14-58-01.778055.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-10-03T14-58-01.778055.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_10_03T14_58_01.778055", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-10-03T14-58-01.778055.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-10-03T14-58-01.778055.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_10_03T14_58_01.778055", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-10-03T14-58-01.778055.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-10-03T14-58-01.778055.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_10_03T14_58_01.778055", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-10-03T14-58-01.778055.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-10-03T14-58-01.778055.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_10_03T14_58_01.778055", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-10-03T14-58-01.778055.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-10-03T14-58-01.778055.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_10_03T14_58_01.778055", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-03T14-58-01.778055.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-03T14-58-01.778055.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_10_03T14_58_01.778055", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-10-03T14-58-01.778055.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-10-03T14-58-01.778055.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_10_03T14_58_01.778055", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-10-03T14-58-01.778055.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-10-03T14-58-01.778055.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_10_03T14_58_01.778055", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-10-03T14-58-01.778055.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-10-03T14-58-01.778055.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_10_03T14_58_01.778055", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-10-03T14-58-01.778055.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-10-03T14-58-01.778055.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_10_03T14_58_01.778055", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-10-03T14-58-01.778055.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-10-03T14-58-01.778055.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_10_03T14_58_01.778055", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-10-03T14-58-01.778055.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_medicine|5_2023-10-03T14-58-01.778055.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_10_03T14_58_01.778055", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-10-03T14-58-01.778055.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-10-03T14-58-01.778055.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_10_03T14_58_01.778055", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-10-03T14-58-01.778055.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-10-03T14-58-01.778055.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_10_03T14_58_01.778055", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-10-03T14-58-01.778055.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-10-03T14-58-01.778055.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_10_03T14_58_01.778055", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-10-03T14-58-01.778055.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-10-03T14-58-01.778055.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_10_03T14_58_01.778055", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-03T14-58-01.778055.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-03T14-58-01.778055.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_10_03T14_58_01.778055", "path": ["**/details_harness|hendrycksTest-virology|5_2023-10-03T14-58-01.778055.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-10-03T14-58-01.778055.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_10_03T14_58_01.778055", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-10-03T14-58-01.778055.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-10-03T14-58-01.778055.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_10_03T14_58_01.778055", "path": ["**/details_harness|truthfulqa:mc|0_2023-10-03T14-58-01.778055.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-10-03T14-58-01.778055.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_10_03T14_58_01.778055", "path": ["results_2023-10-03T14-58-01.778055.parquet"]}, {"split": "latest", "path": ["results_2023-10-03T14-58-01.778055.parquet"]}]}]}
2023-10-03T13:59:27+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of PulsarAI/MythoMax-L2-LoRA-Assemble-13B ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model PulsarAI/MythoMax-L2-LoRA-Assemble-13B on the Open LLM Leaderboard. The dataset is composed of 61 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-10-03T14:58:01.778055 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
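The summary above points at loading per-run details with the `datasets` library without showing the call. A minimal sketch, assuming the details repo follows the leaderboard's `open-llm-leaderboard/details_<org>__<model>` naming convention (the repo id below is inferred from that convention, not stated verbatim in this record), with a config name and split taken from the metadata above:

```python
from datasets import load_dataset

# Repo id inferred from the leaderboard's details_<org>__<model> naming
# convention; adjust if the actual dataset id differs.
repo = "open-llm-leaderboard/details_PulsarAI__MythoMax-L2-LoRA-Assemble-13B"

# "harness_truthfulqa_mc_0" is one of the config_name values listed in the
# metadata above; the "latest" split always tracks the newest run.
data = load_dataset(repo, "harness_truthfulqa_mc_0", split="latest")
print(data[0])
```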
[ "# Dataset Card for Evaluation run of PulsarAI/MythoMax-L2-LoRA-Assemble-13B", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model PulsarAI/MythoMax-L2-LoRA-Assemble-13B on the Open LLM Leaderboard.\n\nThe dataset is composed of 61 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-03T14:58:01.778055(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of PulsarAI/MythoMax-L2-LoRA-Assemble-13B", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model PulsarAI/MythoMax-L2-LoRA-Assemble-13B on the Open LLM Leaderboard.\n\nThe dataset is composed of 61 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-03T14:58:01.778055(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 27, 31, 175, 66, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of PulsarAI/MythoMax-L2-LoRA-Assemble-13B## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model PulsarAI/MythoMax-L2-LoRA-Assemble-13B on the Open LLM Leaderboard.\n\nThe dataset is composed of 61 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-10-03T14:58:01.778055(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
a9558cade7c6ccc0de1217f75a1671f142e210c3
# Dataset Card for Evaluation run of NousResearch/Capybara-7B ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/NousResearch/Capybara-7B - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [NousResearch/Capybara-7B](https://huggingface.co/NousResearch/Capybara-7B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_NousResearch__Capybara-7B", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-10-29T02:30:32.781976](https://huggingface.co/datasets/open-llm-leaderboard/details_NousResearch__Capybara-7B/blob/main/results_2023-10-29T02-30-32.781976.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "em": 0.2792575503355705, "em_stderr": 0.004594435554175769, "f1": 0.33824874161073976, "f1_stderr": 0.004551451269813922, "acc": 0.4015045121001835, "acc_stderr": 0.009699659493944818 }, "harness|drop|3": { "em": 0.2792575503355705, "em_stderr": 0.004594435554175769, "f1": 0.33824874161073976, "f1_stderr": 0.004551451269813922 }, "harness|gsm8k|5": { "acc": 0.06899166034874905, "acc_stderr": 0.006980995834838586 }, "harness|winogrande|5": { "acc": 0.734017363851618, "acc_stderr": 0.012418323153051051 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
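The snippet in the card above pulls one task's per-sample details; the aggregated "results" configuration works the same way. A short sketch: `get_dataset_config_names` is a standard `datasets` helper used here to enumerate the 64 configurations, and the "results" config and "latest" split names are taken from the card rather than verified against the hub:

```python
from datasets import get_dataset_config_names, load_dataset

repo = "open-llm-leaderboard/details_NousResearch__Capybara-7B"

# List the configurations instead of hard-coding names; the card says 64 exist.
configs = get_dataset_config_names(repo)
print(len(configs), configs[:5])

# "results" aggregates every run; the "latest" split tracks the newest one.
results = load_dataset(repo, "results", split="latest")
print(results[0])
```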
open-llm-leaderboard/details_NousResearch__Capybara-7B
[ "region:us" ]
2023-10-03T14:12:15+00:00
{"pretty_name": "Evaluation run of NousResearch/Capybara-7B", "dataset_summary": "Dataset automatically created during the evaluation run of model [NousResearch/Capybara-7B](https://huggingface.co/NousResearch/Capybara-7B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_NousResearch__Capybara-7B\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-10-29T02:30:32.781976](https://huggingface.co/datasets/open-llm-leaderboard/details_NousResearch__Capybara-7B/blob/main/results_2023-10-29T02-30-32.781976.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.2792575503355705,\n \"em_stderr\": 0.004594435554175769,\n \"f1\": 0.33824874161073976,\n \"f1_stderr\": 0.004551451269813922,\n \"acc\": 0.4015045121001835,\n \"acc_stderr\": 0.009699659493944818\n },\n \"harness|drop|3\": {\n \"em\": 0.2792575503355705,\n \"em_stderr\": 0.004594435554175769,\n \"f1\": 0.33824874161073976,\n \"f1_stderr\": 0.004551451269813922\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.06899166034874905,\n \"acc_stderr\": 0.006980995834838586\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.734017363851618,\n \"acc_stderr\": 0.012418323153051051\n }\n}\n```", "repo_url": "https://huggingface.co/NousResearch/Capybara-7B", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_10_03T15_11_52.026776", "path": ["**/details_harness|arc:challenge|25_2023-10-03T15-11-52.026776.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-10-03T15-11-52.026776.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_10_29T02_30_32.781976", "path": ["**/details_harness|drop|3_2023-10-29T02-30-32.781976.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-10-29T02-30-32.781976.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_10_29T02_30_32.781976", "path": ["**/details_harness|gsm8k|5_2023-10-29T02-30-32.781976.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-10-29T02-30-32.781976.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_10_03T15_11_52.026776", "path": ["**/details_harness|hellaswag|10_2023-10-03T15-11-52.026776.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-10-03T15-11-52.026776.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": 
"2023_10_03T15_11_52.026776", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-03T15-11-52.026776.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-10-03T15-11-52.026776.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-10-03T15-11-52.026776.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-10-03T15-11-52.026776.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-03T15-11-52.026776.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-10-03T15-11-52.026776.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-10-03T15-11-52.026776.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-10-03T15-11-52.026776.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-10-03T15-11-52.026776.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-10-03T15-11-52.026776.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-10-03T15-11-52.026776.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-10-03T15-11-52.026776.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-03T15-11-52.026776.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-10-03T15-11-52.026776.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-03T15-11-52.026776.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-03T15-11-52.026776.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-10-03T15-11-52.026776.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-10-03T15-11-52.026776.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-10-03T15-11-52.026776.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-03T15-11-52.026776.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-03T15-11-52.026776.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-03T15-11-52.026776.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-10-03T15-11-52.026776.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-03T15-11-52.026776.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-03T15-11-52.026776.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-03T15-11-52.026776.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-03T15-11-52.026776.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-10-03T15-11-52.026776.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-03T15-11-52.026776.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-03T15-11-52.026776.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-03T15-11-52.026776.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-03T15-11-52.026776.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-10-03T15-11-52.026776.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-10-03T15-11-52.026776.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-10-03T15-11-52.026776.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-10-03T15-11-52.026776.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-03T15-11-52.026776.parquet", 
"**/details_harness|hendrycksTest-machine_learning|5_2023-10-03T15-11-52.026776.parquet", "**/details_harness|hendrycksTest-management|5_2023-10-03T15-11-52.026776.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-10-03T15-11-52.026776.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-10-03T15-11-52.026776.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-10-03T15-11-52.026776.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-10-03T15-11-52.026776.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-03T15-11-52.026776.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-10-03T15-11-52.026776.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-10-03T15-11-52.026776.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-10-03T15-11-52.026776.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-10-03T15-11-52.026776.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-10-03T15-11-52.026776.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-10-03T15-11-52.026776.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-10-03T15-11-52.026776.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-10-03T15-11-52.026776.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-10-03T15-11-52.026776.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-10-03T15-11-52.026776.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-03T15-11-52.026776.parquet", "**/details_harness|hendrycksTest-virology|5_2023-10-03T15-11-52.026776.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-10-03T15-11-52.026776.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-03T15-11-52.026776.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-10-03T15-11-52.026776.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-10-03T15-11-52.026776.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-10-03T15-11-52.026776.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-03T15-11-52.026776.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-10-03T15-11-52.026776.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-10-03T15-11-52.026776.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-10-03T15-11-52.026776.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-10-03T15-11-52.026776.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-10-03T15-11-52.026776.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-10-03T15-11-52.026776.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-10-03T15-11-52.026776.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-03T15-11-52.026776.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-10-03T15-11-52.026776.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-03T15-11-52.026776.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-03T15-11-52.026776.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-10-03T15-11-52.026776.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-10-03T15-11-52.026776.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-10-03T15-11-52.026776.parquet", 
"**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-03T15-11-52.026776.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-03T15-11-52.026776.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-03T15-11-52.026776.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-10-03T15-11-52.026776.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-03T15-11-52.026776.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-03T15-11-52.026776.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-03T15-11-52.026776.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-03T15-11-52.026776.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-10-03T15-11-52.026776.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-03T15-11-52.026776.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-03T15-11-52.026776.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-03T15-11-52.026776.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-03T15-11-52.026776.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-10-03T15-11-52.026776.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-10-03T15-11-52.026776.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-10-03T15-11-52.026776.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-10-03T15-11-52.026776.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-03T15-11-52.026776.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-10-03T15-11-52.026776.parquet", "**/details_harness|hendrycksTest-management|5_2023-10-03T15-11-52.026776.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-10-03T15-11-52.026776.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-10-03T15-11-52.026776.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-10-03T15-11-52.026776.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-10-03T15-11-52.026776.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-03T15-11-52.026776.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-10-03T15-11-52.026776.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-10-03T15-11-52.026776.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-10-03T15-11-52.026776.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-10-03T15-11-52.026776.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-10-03T15-11-52.026776.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-10-03T15-11-52.026776.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-10-03T15-11-52.026776.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-10-03T15-11-52.026776.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-10-03T15-11-52.026776.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-10-03T15-11-52.026776.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-03T15-11-52.026776.parquet", "**/details_harness|hendrycksTest-virology|5_2023-10-03T15-11-52.026776.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-10-03T15-11-52.026776.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", 
"data_files": [{"split": "2023_10_03T15_11_52.026776", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-03T15-11-52.026776.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-03T15-11-52.026776.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_10_03T15_11_52.026776", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-10-03T15-11-52.026776.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-10-03T15-11-52.026776.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_10_03T15_11_52.026776", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-10-03T15-11-52.026776.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-10-03T15-11-52.026776.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_10_03T15_11_52.026776", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-10-03T15-11-52.026776.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-10-03T15-11-52.026776.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_10_03T15_11_52.026776", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-03T15-11-52.026776.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-03T15-11-52.026776.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_10_03T15_11_52.026776", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-10-03T15-11-52.026776.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-10-03T15-11-52.026776.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_10_03T15_11_52.026776", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-10-03T15-11-52.026776.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-10-03T15-11-52.026776.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_10_03T15_11_52.026776", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-10-03T15-11-52.026776.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-10-03T15-11-52.026776.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_10_03T15_11_52.026776", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-10-03T15-11-52.026776.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-10-03T15-11-52.026776.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_10_03T15_11_52.026776", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-10-03T15-11-52.026776.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-10-03T15-11-52.026776.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_10_03T15_11_52.026776", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-10-03T15-11-52.026776.parquet"]}, {"split": "latest", 
"path": ["**/details_harness|hendrycksTest-college_physics|5_2023-10-03T15-11-52.026776.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_10_03T15_11_52.026776", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-10-03T15-11-52.026776.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-10-03T15-11-52.026776.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_10_03T15_11_52.026776", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-03T15-11-52.026776.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-03T15-11-52.026776.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_10_03T15_11_52.026776", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-10-03T15-11-52.026776.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-10-03T15-11-52.026776.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_10_03T15_11_52.026776", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-03T15-11-52.026776.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-03T15-11-52.026776.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_10_03T15_11_52.026776", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-03T15-11-52.026776.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-03T15-11-52.026776.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_10_03T15_11_52.026776", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-10-03T15-11-52.026776.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-10-03T15-11-52.026776.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_10_03T15_11_52.026776", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-10-03T15-11-52.026776.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-10-03T15-11-52.026776.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_10_03T15_11_52.026776", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-10-03T15-11-52.026776.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-10-03T15-11-52.026776.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_10_03T15_11_52.026776", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-03T15-11-52.026776.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-03T15-11-52.026776.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_10_03T15_11_52.026776", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-03T15-11-52.026776.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-03T15-11-52.026776.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_10_03T15_11_52.026776", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-03T15-11-52.026776.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-03T15-11-52.026776.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_10_03T15_11_52.026776", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-10-03T15-11-52.026776.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-10-03T15-11-52.026776.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_10_03T15_11_52.026776", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-03T15-11-52.026776.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-03T15-11-52.026776.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_10_03T15_11_52.026776", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-03T15-11-52.026776.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-03T15-11-52.026776.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_10_03T15_11_52.026776", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-03T15-11-52.026776.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-03T15-11-52.026776.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_10_03T15_11_52.026776", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-03T15-11-52.026776.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-03T15-11-52.026776.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_10_03T15_11_52.026776", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-10-03T15-11-52.026776.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-10-03T15-11-52.026776.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_10_03T15_11_52.026776", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-03T15-11-52.026776.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-03T15-11-52.026776.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_10_03T15_11_52.026776", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-03T15-11-52.026776.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-03T15-11-52.026776.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_10_03T15_11_52.026776", "path": 
["**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-03T15-11-52.026776.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-03T15-11-52.026776.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_10_03T15_11_52.026776", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-03T15-11-52.026776.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-03T15-11-52.026776.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_10_03T15_11_52.026776", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-10-03T15-11-52.026776.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-10-03T15-11-52.026776.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_10_03T15_11_52.026776", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-10-03T15-11-52.026776.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-10-03T15-11-52.026776.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_10_03T15_11_52.026776", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-10-03T15-11-52.026776.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-10-03T15-11-52.026776.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_10_03T15_11_52.026776", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-10-03T15-11-52.026776.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-10-03T15-11-52.026776.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_10_03T15_11_52.026776", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-03T15-11-52.026776.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-03T15-11-52.026776.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_10_03T15_11_52.026776", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-10-03T15-11-52.026776.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-10-03T15-11-52.026776.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_10_03T15_11_52.026776", "path": ["**/details_harness|hendrycksTest-management|5_2023-10-03T15-11-52.026776.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-10-03T15-11-52.026776.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_10_03T15_11_52.026776", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-10-03T15-11-52.026776.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-10-03T15-11-52.026776.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_10_03T15_11_52.026776", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-10-03T15-11-52.026776.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-medical_genetics|5_2023-10-03T15-11-52.026776.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_10_03T15_11_52.026776", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-10-03T15-11-52.026776.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-10-03T15-11-52.026776.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_10_03T15_11_52.026776", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-10-03T15-11-52.026776.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-10-03T15-11-52.026776.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_10_03T15_11_52.026776", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-03T15-11-52.026776.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-03T15-11-52.026776.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_10_03T15_11_52.026776", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-10-03T15-11-52.026776.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-10-03T15-11-52.026776.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_10_03T15_11_52.026776", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-10-03T15-11-52.026776.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-10-03T15-11-52.026776.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_10_03T15_11_52.026776", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-10-03T15-11-52.026776.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-10-03T15-11-52.026776.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_10_03T15_11_52.026776", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-10-03T15-11-52.026776.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-10-03T15-11-52.026776.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_10_03T15_11_52.026776", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-10-03T15-11-52.026776.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-10-03T15-11-52.026776.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_10_03T15_11_52.026776", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-10-03T15-11-52.026776.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-10-03T15-11-52.026776.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_10_03T15_11_52.026776", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-10-03T15-11-52.026776.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-10-03T15-11-52.026776.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": 
"2023_10_03T15_11_52.026776", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-10-03T15-11-52.026776.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-10-03T15-11-52.026776.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_10_03T15_11_52.026776", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-10-03T15-11-52.026776.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-10-03T15-11-52.026776.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_10_03T15_11_52.026776", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-10-03T15-11-52.026776.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-10-03T15-11-52.026776.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_10_03T15_11_52.026776", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-03T15-11-52.026776.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-03T15-11-52.026776.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_10_03T15_11_52.026776", "path": ["**/details_harness|hendrycksTest-virology|5_2023-10-03T15-11-52.026776.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-10-03T15-11-52.026776.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_10_03T15_11_52.026776", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-10-03T15-11-52.026776.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-10-03T15-11-52.026776.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_10_03T15_11_52.026776", "path": ["**/details_harness|truthfulqa:mc|0_2023-10-03T15-11-52.026776.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-10-03T15-11-52.026776.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_10_29T02_30_32.781976", "path": ["**/details_harness|winogrande|5_2023-10-29T02-30-32.781976.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-10-29T02-30-32.781976.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_10_03T15_11_52.026776", "path": ["results_2023-10-03T15-11-52.026776.parquet"]}, {"split": "2023_10_29T02_30_32.781976", "path": ["results_2023-10-29T02-30-32.781976.parquet"]}, {"split": "latest", "path": ["results_2023-10-29T02-30-32.781976.parquet"]}]}]}
2023-10-29T02:30:45+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of NousResearch/Capybara-7B ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model NousResearch/Capybara-7B on the Open LLM Leaderboard. The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-10-29T02:30:32.781976 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks; you can find each in the results and the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
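A hedged illustration for the load instruction above, whose original snippet was stripped along with the card's URLs: the sketch below assumes the `details_<org>__<model>` repo-naming convention used by the other leaderboard cards in this document, so the exact repo id for Capybara-7B is an assumption to verify, not a confirmed value.

```python
from datasets import load_dataset

# Assumed repo id, inferred from the "details_<org>__<model>" naming convention
# used by similar leaderboard cards in this document -- verify before relying on it.
data = load_dataset(
    "open-llm-leaderboard/details_NousResearch__Capybara-7B",
    "harness_winogrande_5",  # one of the 64 per-task configurations
    split="latest",          # each configuration exposes a "latest" split per the metadata above
)
print(data[0])
```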
[ "# Dataset Card for Evaluation run of NousResearch/Capybara-7B", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model NousResearch/Capybara-7B on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-29T02:30:32.781976(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of NousResearch/Capybara-7B", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model NousResearch/Capybara-7B on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-29T02:30:32.781976(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 18, 31, 166, 67, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of NousResearch/Capybara-7B## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model NousResearch/Capybara-7B on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-10-29T02:30:32.781976(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
f2e19c4b8468a016bb1d75d21ef9f50dca63d7e2
Required data files for using the TRUST model for skin tone analysis for the [DALL-Eval: Probing the Reasoning Skills and Social Biases of Text-to-Image Generation Models (ICCV 2023)](https://github.com/j-min/DallEval) paper. Please note that these files are merged together and uploaded here for convenience, but they can also be obtained from the original [TRUST repo](https://github.com/HavenFeng/TRUST).
abhayzala/TRUSTDataFiles
[ "region:us" ]
2023-10-03T14:22:48+00:00
{}
2023-10-03T14:33:46+00:00
[]
[]
TAGS #region-us
Required data files for using the TRUST model for skin tone analysis for the DALL-Eval: Probing the Reasoning Skills and Social Biases of Text-to-Image Generation Models (ICCV 2023) paper. Please note that these files are merged together and uploaded here for convenience, but they can also be obtained from the original TRUST repo.
[]
[ "TAGS\n#region-us \n" ]
[ 6 ]
[ "passage: TAGS\n#region-us \n" ]
15add3a9d1ff51f075dbef9d9e66a49317a24ad8
# Dataset Card for MegaWika ## Dataset Description - **Homepage:** [HuggingFace](https://huggingface.co/datasets/hltcoe/megawika) - **Repository:** [HuggingFace](https://huggingface.co/datasets/hltcoe/megawika) - **Paper:** [Coming soon] - **Leaderboard:** [Coming soon] - **Point of Contact:** [Samuel Barham](mailto:[email protected]) ### Dataset Summary MegaWika is a multi- and crosslingual text dataset containing 30 million Wikipedia passages with their scraped and cleaned web citations. The passages span 50 Wikipedias in 50 languages, and the articles in which the passages were originally embedded are included for convenience. Where a Wikipedia passage is in a non-English language, an automated English translation is provided. Furthermore, nearly 130 million English question/answer pairs were extracted from the passages, and FrameNet events occurring in the passages are detected using the [LOME](https://aclanthology.org/2021.eacl-demos.19.pdf) FrameNet parser. <!--- To get a feel for the dataset -- its structure, content, strengths and weaknesses -- you may visit the [dataset viewer](https://huggingface.co/spaces/hltcoe/megawika) we have set up as a HuggingFace Space. It allows the curious visitor to explore a small set of examples spread across a number of the dataset's constituent languages. --> ### Dataset Creation The pipeline through which MegaWika was created is complex, and is described in more detail in the paper (linked above), but the following diagram illustrates the basic approach. ![Illustration of MegaWikaProcess](images/MegaWikaProcess-cross-lingual.drawio.png) ### Supported Tasks and Leaderboards MegaWika is meant to support research across a variety of tasks, including report generation, summarization, information retrieval, question answering, etc. ### Languages MegaWika is divided by Wikipedia language. There are 50 languages, including English, each designated by its 2-character ISO language code: - `af`: Afrikaans - `ar`: Arabic - `az`: Azeri (Azerbaijani) - `bn`: Bengali - `cs`: Czech - `de`: German (Deutsch) - `en`: English - `es`: Spanish (Español) - `et`: Estonian - `fa`: Farsi (Persian) - `fi`: Finnish - `fr`: French - `ga`: Irish (Gaelic) - `gl`: Galician - `gu`: Gujarati - `he`: Hebrew - `hi`: Hindi - `hr`: Croatian - `id`: Indonesian - `it`: Italian - `ja`: Japanese - `ka`: Georgian (Kartvelian/Kartlian) - `kk`: Kazakh - `km`: Khmer - `ko`: Korean - `lt`: Lithuanian - `lv`: Latvian - `mk`: Macedonian (Makedonski) - `ml`: Malayalam - `mn`: Mongolian - `mr`: Marathi - `my`: Burmese (Myanmar language) - `ne`: Nepali - `nl`: Dutch (Nederlands) - `pl`: Polish - `ps`: Pashto - `pt`: Portuguese - `ro`: Romanian - `ru`: Russian - `si`: Sinhalese (Sri Lankan language) - `sl`: Slovenian - `sv`: Swedish (Svenska) - `ta`: Tamil - `th`: Thai - `tr`: Turkish - `uk`: Ukrainian - `ur`: Urdu - `vi`: Vietnamese - `xh`: Xhosa - `zh`: Chinese (Zhōng wén) ## Dataset Structure The dataset is divided by language, and the data for each of the 50 languages is further chunked into discrete JSON lines files. Each line of these files -- we'll call such a line an **instance** -- contains the data extracted from a single Wikipedia article. ### Data Instances Each instance contains the text of the seed Wikipedia article, along with a list of **entries**. Each entry consists basically of an extracted Wikipedia passage, the URL and scraped text of the web source it cites, a list of question/answer pairs extracted from the passage, and a framenet parse of the passage.
Where the passage is from a non-English Wikipedia, a machine translation into English is also provided. ### Data Fields The detailed structure of an instance is as follows: ``` { "article_title": <string : title of original Wikipedia article> "article_text": <string : text of Wikipedia article> "entries": [ # Wiki Passage "id": <string : passage ID> "passage": { "text": <string : text of passage in English (possibly via MT)> "parse": <list of dict : FrameNet parse of English passage text> "en_tokens": <dict : tokenization of passage in English> "lang_tokens": <dict : tokenization of original non-English passage> "en_lang_token_map": <dict : alignment mapping between English and original language token indices> } # MT "original": <string : original language passage> "original_sents": <list of string : sentencized original language passage> "translation": <string : machine translation of passage> "translation_sents": <list of string : sentencized machine translation of passage> "translation_probs": <list of float : log prob of machine translation by sentence, where available> "repetitious_translation": <string \in ("true", "false") : automated judgment on whether machine translation is pathologically repetitious> "source_lang": <string : language ID, 2-character ISO code> # Source "source_url": <string : URL of the cited web source> "source_text": <string : content extracted from the scrape of the source URL> # Question/Answer Pairs "qa_pairs": [ ... { "question": <string : generated question> "passage_id": <string : passage ID> "en_answer": <string : English answer> "lang_answer": <string : aligned original language answer> "frames": [ ... { "frame": <string : frame triggered by the question> "argument": <string : detected frame arguments> } ... ] # NB: answer matches can be empty, in the case no matching span exists "en_matches_in_source": <list of int : start and end index of the English language-answer token(s) in the source document> "en_match_in_passage": <list of int : start and end index of the English language-answer token(s) in the English language translation of the passage> "lang_matches_in_source": <list of int : start and end index of the original language-answer token(s) in the source document> "lang_match_in_passage": <list of int : start and end index of the original language-answer token(s) in the original language passage> "passage": <list of string : sentencized view of the passage> "en_answer_tokens": <list of string> "match_disambiguated_question": <string : disambiguated version of question obtained by matching pronouns with article title (noisy but often helpful)> } ... ] ] } ``` English language instances differ not in structure but in content; 1. Fields in the block labeled "MT" above are naturally null (that is, they are set to falsy values in Python -- specifically `None`) 2. Since the Wiki passage only exists in English, and has no corresponding non-English "original language" version, answer spans also necessarily have only an English-language version (and no non-English "original-language" version). Therefore, fields in the `qa_pairs` block beginning with `lang_` are set to null/falsy values in Python (in this case, empty lists). ### Data Splits MegaWika is currently split only by language, as each task will imply its own approach to filtering, sampling, downselecting, and splitting into train/test splits. <!--- ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers?
[More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] --> ## Licensing and Takedown MegaWika 1.0 consists in part of documents scraped from across the web (based on citations linked in Wikipedia articles). We do not own any of the scraped text nor do we claim copyright: text drawn from Wikipedia citations is meant for research use in algorithmic design and model training. We release this dataset and all its contents under CC-BY-SA-4.0. ### Notice and Takedown Policy: *NB*: Should you consider that our data contains material that is owned by you and should therefore not be reproduced here, please: - Clearly identify yourself, with detailed contact data such as an address, telephone number or email address at which you can be contacted. - Clearly identify the copyrighted work claimed to be infringed. - Clearly identify the material that is claimed to be infringing and information reasonably sufficient to allow us to locate the material. And contact the authors. *Take down*: We will comply with legitimate requests by removing the affected sources from the next release of the dataset. ## Additional Information ### Dataset Curators Released and maintained by the Johns Hopkins University Human Language Technology Center of Excellence (JHU/HLTCOE). You can contact one of the MegaWika authors, including [Samuel Barham](mailto:[email protected]), [Orion Weller](mailto:[email protected]), and [Ben van Durme](mailto:[email protected]) with questions. ### Licensing Information Released under the [Attribution-ShareAlike 4.0 International (CC BY-SA 4.0)](https://creativecommons.org/licenses/by-sa/4.0/) license. ### Citation Information ``` @misc{barham2023megawika, title={MegaWika: Millions of reports and their sources across 50 diverse languages}, author={Samuel Barham and Orion Weller and Michelle Yuan and Kenton Murray and Mahsa Yarmohammadi and Zhengping Jiang and Siddharth Vashishtha and Alexander Martin and Anqi Liu and Aaron Steven White and Jordan Boyd-Graber and Benjamin Van Durme}, year={2023}, eprint={2307.07049}, archivePrefix={arXiv}, primaryClass={cs.CL} } ``` <!-- ### Contributions [More Information Needed] -->
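Since the card documents the instance schema in detail, a short usage sketch may help; it assumes the per-language config name ("en") and a "train" split, neither of which is confirmed by this card, while the field accesses follow the schema given above.

```python
from datasets import load_dataset

# Hypothetical load: the "en" config name and "train" split are assumptions;
# check the repo's configuration list before relying on them.
megawika = load_dataset("hltcoe/megawika", "en", split="train", streaming=True)

for instance in megawika:
    print(instance["article_title"])
    for entry in instance["entries"]:
        passage_text = entry["passage"]["text"]  # English passage text (possibly via MT)
        source_url = entry["source_url"]         # URL of the cited web source
        for qa in entry["qa_pairs"]:
            print(qa["question"], "->", qa["en_answer"])
        break
    break
```

Streaming avoids downloading the full 30-million-passage dataset just to inspect a single instance.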
sbarham/megawika-test
[ "task_categories:summarization", "task_categories:question-answering", "task_categories:text-generation", "task_categories:text2text-generation", "size_categories:10M<n<100M", "language:af", "language:ar", "language:az", "language:bn", "language:cs", "language:de", "language:en", "language:es", "language:et", "language:fa", "language:fi", "language:fr", "language:ga", "language:gl", "language:gu", "language:he", "language:hi", "language:hr", "language:id", "language:it", "language:ja", "language:ka", "language:kk", "language:km", "language:ko", "language:lt", "language:lv", "language:mk", "language:ml", "language:mn", "language:mr", "language:my", "language:ne", "language:nl", "language:pl", "language:ps", "language:pt", "language:ro", "language:ru", "language:si", "language:sl", "language:sv", "language:ta", "language:th", "language:tr", "language:uk", "language:ur", "language:vi", "language:xh", "language:zh", "license:cc-by-sa-4.0", "arxiv:2307.07049", "region:us" ]
2023-10-03T14:24:34+00:00
{"language": ["af", "ar", "az", "bn", "cs", "de", "en", "es", "et", "fa", "fi", "fr", "ga", "gl", "gu", "he", "hi", "hr", "id", "it", "ja", "ka", "kk", "km", "ko", "lt", "lv", "mk", "ml", "mn", "mr", "my", "ne", "nl", "pl", "ps", "pt", "ro", "ru", "si", "sl", "sv", "ta", "th", "tr", "uk", "ur", "vi", "xh", "zh"], "license": "cc-by-sa-4.0", "size_categories": ["10M<n<100M"], "task_categories": ["summarization", "question-answering", "text-generation", "text2text-generation"], "pretty_name": "MegaWika"}
2023-10-03T16:22:49+00:00
[ "2307.07049" ]
[ "af", "ar", "az", "bn", "cs", "de", "en", "es", "et", "fa", "fi", "fr", "ga", "gl", "gu", "he", "hi", "hr", "id", "it", "ja", "ka", "kk", "km", "ko", "lt", "lv", "mk", "ml", "mn", "mr", "my", "ne", "nl", "pl", "ps", "pt", "ro", "ru", "si", "sl", "sv", "ta", "th", "tr", "uk", "ur", "vi", "xh", "zh" ]
TAGS #task_categories-summarization #task_categories-question-answering #task_categories-text-generation #task_categories-text2text-generation #size_categories-10M<n<100M #language-Afrikaans #language-Arabic #language-Azerbaijani #language-Bengali #language-Czech #language-German #language-English #language-Spanish #language-Estonian #language-Persian #language-Finnish #language-French #language-Irish #language-Galician #language-Gujarati #language-Hebrew #language-Hindi #language-Croatian #language-Indonesian #language-Italian #language-Japanese #language-Georgian #language-Kazakh #language-Khmer #language-Korean #language-Lithuanian #language-Latvian #language-Macedonian #language-Malayalam #language-Mongolian #language-Marathi #language-Burmese #language-Nepali (macrolanguage) #language-Dutch #language-Polish #language-Pushto #language-Portuguese #language-Romanian #language-Russian #language-Sinhala #language-Slovenian #language-Swedish #language-Tamil #language-Thai #language-Turkish #language-Ukrainian #language-Urdu #language-Vietnamese #language-Xhosa #language-Chinese #license-cc-by-sa-4.0 #arxiv-2307.07049 #region-us
# Dataset Card for MegaWika ## Dataset Description - Homepage: HuggingFace - Repository: HuggingFace - Paper: [Coming soon] - Leaderboard: [Coming soon] - Point of Contact: Samuel Barham ### Dataset Summary MegaWika is a multi- and crosslingual text dataset containing 30 million Wikipedia passages with their scraped and cleaned web citations. The passages span 50 Wikipedias in 50 languages, and the articles in which the passages were originally embedded are included for convenience. Where a Wikipedia passage is in a non-English language, an automated English translation is provided. Furthermore, nearly 130 million English question/answer pairs were extracted from the passages, and FrameNet events occurring in the passages are detected using the LOME FrameNet parser. ### Dataset Creation The pipeline through which MegaWika was created is complex, and is described in more detail in the paper (linked above), but the following diagram illustrates the basic approach. !Illustration of MegaWikaProcess ### Supported Tasks and Leaderboards MegaWika is meant to support research across a variety of tasks, including report generation, summarization, information retrieval, question answering, etc. ### Languages MegaWika is divided by Wikipedia language. There are 50 languages, including English, each designated by their 2-character ISO language code: - 'af': Afrikaans - 'ar': Arabic - 'az': Azeri (Azerbaijani) - 'bn': Bengali - 'cs': Czech - 'de': German (Deutsch) - 'en': English - 'es': Spanish (Español) - 'et': Estonian - 'fa': Farsi (Persian) - 'fi': Finnish - 'fr': French - 'ga': Irish (Gaelic) - 'gl': Galician - 'gu': Gujarati - 'he': Hebrew - 'hi': Hindi - 'hr': Hungarian - 'id': Indonesian - 'it': Italian - 'ja': Japanese - 'ka': Georgian (Kartvelian/Kartlian) - 'kk': Kazakh - 'km': Khmer - 'ko': Korean - 'lt': Lithuanian - 'lv': Latvian - 'mk': Macedonian (Makedonski) - 'ml': Malay (Malayalam) - 'mn': Mongolian - 'mr': Marathi - 'my': Burmese (Myanmar language) - 'ne': Nepali - 'nl': Dutch (Nederlands) - 'pl': Polish - 'ps': Pashto - 'pt': Portuguese - 'ro': Romanian - 'ru': Russian - 'si': Sinhalese (Sri Lankan language) - 'sl': Slovenian - 'sv': Swedish (Svenska) - 'ta': Tamil - 'th': Thai - 'tr': Turkish - 'uk': Ukrainian - 'ur': Urdu - 'vi': Vietnamese - 'xh': Xhosa - 'zh': Chinese (Zhōng wén) ## Dataset Structure The dataset is divided by language, and the data for each of the 50 languages is further chunked into discrete JSON lines files. Each line of these files -- we'll call such a line an instance -- contains the data extracted from a single Wikipedia article. ### Data Instances Each instance contains the text of the seed Wikipedia article, along with a list of entries. Each entry consists basically in an extracted Wikipedia passage, the URL and scraped text of the web source it cites, a list of questions/answer pairs extracted from the passage, and a framenet parse of the passage. Where the passage is from a non-English Wikipedia, a machine translation into English is also provided. ### Data Fields The detailed structure of an instance is as follows: English language instances differ not in structure but in content; 1. Fields in the block labeled "MT" above are naturally null (that is, they are set to falsy values in Python -- specifically 'None') 2. Since the Wiki passage only exists in English, and has no corresponding non-English "original language" version, answer spans also necessarily have only an English-language version (and no non-English "original-language" version. 
Therefore, fields in the 'qa_pairs' block beginning with 'lang_' are set to null/falsy values in Python (in this case, empty lists). ### Data Splits MegaWika is currently split only by language, as each task will imply its own approach to filtering, sampling, downselecting, and splitting into train/test splits. ## Licensing and Takedown MegaWika 1.0 consists in part of documents scraped from across the web (based on citations linked in Wikipedia articles.) We do not own any of the scraped text nor do we claim copyright: text drawn from Wikipedia citations are meant for research use in algorithmic design and model training. We release this dataset and all its contents under CC-BY-SA-4.0. ### Notice and Takedown Policy: *NB*: Should you consider that our data contains material that is owned by you and should therefore not be reproduced here, please: - Clearly identify yourself, with detailed contact data such as an address, telephone number or email address at which you can be contacted. - Clearly identify the copyrighted work claimed to be infringed. - Clearly identify the material that is claimed to be infringing and information reasonably sufficient to allow us to locate the material. And contact the authors. *Take down*: We will comply to legitimate requests by removing the affected sources from the next release of the dataset. ## Additional Information ### Dataset Curators Released and maintained by the Johns Hopkins University Human Language Technology Center of Excellence (JHU/HLTCOE). You can contact one the MegaWika authors, including Samuel Barham, Orion Weller, and Ben van Durme with questions. ### Licensing Information Released under the Attribution-ShareAlike 4.0 International (CC BY-SA 4.0) license.
[ "# Dataset Card for MegaWika", "## Dataset Description\n\n- Homepage: HuggingFace\n- Repository: HuggingFace\n- Paper: [Coming soon]\n- Leaderboard: [Coming soon]\n- Point of Contact: Samuel Barham", "### Dataset Summary\n\nMegaWika is a multi- and crosslingual text dataset containing 30 million Wikipedia passages with their scraped and cleaned web citations. The passages span\n50 Wikipedias in 50 languages, and the articles in which the passages were originally embedded are included for convenience. Where a Wikipedia passage is in a\nnon-English language, an automated English translation is provided. Furthermore, nearly 130 million English question/answer pairs were extracted from the\npassages, and FrameNet events occurring in the passages are detected using the LOME FrameNet parser.", "### Dataset Creation\n\nThe pipeline through which MegaWika was created is complex, and is described in more detail in the paper (linked above),\nbut the following diagram illustrates the basic approach.\n\n!Illustration of MegaWikaProcess", "### Supported Tasks and Leaderboards\n\nMegaWika is meant to support research across a variety of tasks, including report generation, summarization, information retrieval, question answering, etc.", "### Languages\n\nMegaWika is divided by Wikipedia language. There are 50 languages, including English, each designated by their 2-character ISO language code:\n- 'af': Afrikaans\n- 'ar': Arabic\n- 'az': Azeri (Azerbaijani)\n- 'bn': Bengali\n- 'cs': Czech\n- 'de': German (Deutsch)\n- 'en': English\n- 'es': Spanish (Español)\n- 'et': Estonian\n- 'fa': Farsi (Persian)\n- 'fi': Finnish\n- 'fr': French\n- 'ga': Irish (Gaelic)\n- 'gl': Galician\n- 'gu': Gujarati\n- 'he': Hebrew\n- 'hi': Hindi\n- 'hr': Hungarian\n- 'id': Indonesian\n- 'it': Italian\n- 'ja': Japanese\n- 'ka': Georgian (Kartvelian/Kartlian)\n- 'kk': Kazakh\n- 'km': Khmer\n- 'ko': Korean\n- 'lt': Lithuanian\n- 'lv': Latvian\n- 'mk': Macedonian (Makedonski)\n- 'ml': Malay (Malayalam)\n- 'mn': Mongolian\n- 'mr': Marathi\n- 'my': Burmese (Myanmar language)\n- 'ne': Nepali\n- 'nl': Dutch (Nederlands)\n- 'pl': Polish\n- 'ps': Pashto\n- 'pt': Portuguese\n- 'ro': Romanian\n- 'ru': Russian\n- 'si': Sinhalese (Sri Lankan language)\n- 'sl': Slovenian\n- 'sv': Swedish (Svenska)\n- 'ta': Tamil\n- 'th': Thai\n- 'tr': Turkish\n- 'uk': Ukrainian\n- 'ur': Urdu\n- 'vi': Vietnamese\n- 'xh': Xhosa\n- 'zh': Chinese (Zhōng wén)", "## Dataset Structure\n\nThe dataset is divided by language, and the data for each of the 50 languages is further chunked into discrete JSON lines files.\nEach line of these files -- we'll call such a line an instance -- contains the data extracted from a single Wikipedia article.", "### Data Instances\n\nEach instance contains the text of the seed Wikipedia article, along with a list of entries. Each entry consists basically in\nan extracted Wikipedia passage, the URL and scraped text of the web source it cites, a list of questions/answer pairs extracted from the passage,\nand a framenet parse of the passage. Where the passage is from a non-English Wikipedia, a machine translation into English is also provided.", "### Data Fields\n\nThe detailed structure of an instance is as follows:\n\n\nEnglish language instances differ not in structure but in content; \n1. Fields in the block labeled \"MT\" above are naturally null (that is, they are set to falsy values in Python -- specifically 'None')\n2. 
Since the Wiki passage only exists in English, and has no corresponding non-English \"original language\" version, answer spans also necessarily have only an English-language version (and no non-English \"original-language\" version. Therefore, fields in the 'qa_pairs' block beginning with 'lang_' are set to null/falsy values in Python (in this case, empty lists).", "### Data Splits\n\nMegaWika is currently split only by language, as each task will imply its own approach to filtering, sampling, downselecting, and splitting into train/test splits.", "## Licensing and Takedown\n\nMegaWika 1.0 consists in part of documents scraped from across the web (based on citations linked in Wikipedia articles.)\n\nWe do not own any of the scraped text nor do we claim copyright: text drawn from Wikipedia citations are meant for research use in algorithmic design and model training.\n\nWe release this dataset and all its contents under CC-BY-SA-4.0.", "### Notice and Takedown Policy:\n*NB*: Should you consider that our data contains material that is owned by you and should therefore not be reproduced here, please:\n\n- Clearly identify yourself, with detailed contact data such as an address, telephone number or email address at which you can be contacted.\n- Clearly identify the copyrighted work claimed to be infringed.\n- Clearly identify the material that is claimed to be infringing and information reasonably sufficient to allow us to locate the material.\n\nAnd contact the authors.\n\n*Take down*: We will comply to legitimate requests by removing the affected sources from the next release of the dataset.", "## Additional Information", "### Dataset Curators\n\nReleased and maintained by the Johns Hopkins University Human Language Technology Center of Excellence (JHU/HLTCOE). \nYou can contact one the MegaWika authors, including Samuel Barham, Orion Weller,\nand Ben van Durme with questions.", "### Licensing Information\n\nReleased under the Attribution-ShareAlike 4.0 International (CC BY-SA 4.0) license." ]
[ "TAGS\n#task_categories-summarization #task_categories-question-answering #task_categories-text-generation #task_categories-text2text-generation #size_categories-10M<n<100M #language-Afrikaans #language-Arabic #language-Azerbaijani #language-Bengali #language-Czech #language-German #language-English #language-Spanish #language-Estonian #language-Persian #language-Finnish #language-French #language-Irish #language-Galician #language-Gujarati #language-Hebrew #language-Hindi #language-Croatian #language-Indonesian #language-Italian #language-Japanese #language-Georgian #language-Kazakh #language-Khmer #language-Korean #language-Lithuanian #language-Latvian #language-Macedonian #language-Malayalam #language-Mongolian #language-Marathi #language-Burmese #language-Nepali (macrolanguage) #language-Dutch #language-Polish #language-Pushto #language-Portuguese #language-Romanian #language-Russian #language-Sinhala #language-Slovenian #language-Swedish #language-Tamil #language-Thai #language-Turkish #language-Ukrainian #language-Urdu #language-Vietnamese #language-Xhosa #language-Chinese #license-cc-by-sa-4.0 #arxiv-2307.07049 #region-us \n", "# Dataset Card for MegaWika", "## Dataset Description\n\n- Homepage: HuggingFace\n- Repository: HuggingFace\n- Paper: [Coming soon]\n- Leaderboard: [Coming soon]\n- Point of Contact: Samuel Barham", "### Dataset Summary\n\nMegaWika is a multi- and crosslingual text dataset containing 30 million Wikipedia passages with their scraped and cleaned web citations. The passages span\n50 Wikipedias in 50 languages, and the articles in which the passages were originally embedded are included for convenience. Where a Wikipedia passage is in a\nnon-English language, an automated English translation is provided. Furthermore, nearly 130 million English question/answer pairs were extracted from the\npassages, and FrameNet events occurring in the passages are detected using the LOME FrameNet parser.", "### Dataset Creation\n\nThe pipeline through which MegaWika was created is complex, and is described in more detail in the paper (linked above),\nbut the following diagram illustrates the basic approach.\n\n!Illustration of MegaWikaProcess", "### Supported Tasks and Leaderboards\n\nMegaWika is meant to support research across a variety of tasks, including report generation, summarization, information retrieval, question answering, etc.", "### Languages\n\nMegaWika is divided by Wikipedia language. 
There are 50 languages, including English, each designated by their 2-character ISO language code:\n- 'af': Afrikaans\n- 'ar': Arabic\n- 'az': Azeri (Azerbaijani)\n- 'bn': Bengali\n- 'cs': Czech\n- 'de': German (Deutsch)\n- 'en': English\n- 'es': Spanish (Español)\n- 'et': Estonian\n- 'fa': Farsi (Persian)\n- 'fi': Finnish\n- 'fr': French\n- 'ga': Irish (Gaelic)\n- 'gl': Galician\n- 'gu': Gujarati\n- 'he': Hebrew\n- 'hi': Hindi\n- 'hr': Hungarian\n- 'id': Indonesian\n- 'it': Italian\n- 'ja': Japanese\n- 'ka': Georgian (Kartvelian/Kartlian)\n- 'kk': Kazakh\n- 'km': Khmer\n- 'ko': Korean\n- 'lt': Lithuanian\n- 'lv': Latvian\n- 'mk': Macedonian (Makedonski)\n- 'ml': Malay (Malayalam)\n- 'mn': Mongolian\n- 'mr': Marathi\n- 'my': Burmese (Myanmar language)\n- 'ne': Nepali\n- 'nl': Dutch (Nederlands)\n- 'pl': Polish\n- 'ps': Pashto\n- 'pt': Portuguese\n- 'ro': Romanian\n- 'ru': Russian\n- 'si': Sinhalese (Sri Lankan language)\n- 'sl': Slovenian\n- 'sv': Swedish (Svenska)\n- 'ta': Tamil\n- 'th': Thai\n- 'tr': Turkish\n- 'uk': Ukrainian\n- 'ur': Urdu\n- 'vi': Vietnamese\n- 'xh': Xhosa\n- 'zh': Chinese (Zhōng wén)", "## Dataset Structure\n\nThe dataset is divided by language, and the data for each of the 50 languages is further chunked into discrete JSON lines files.\nEach line of these files -- we'll call such a line an instance -- contains the data extracted from a single Wikipedia article.", "### Data Instances\n\nEach instance contains the text of the seed Wikipedia article, along with a list of entries. Each entry consists basically in\nan extracted Wikipedia passage, the URL and scraped text of the web source it cites, a list of questions/answer pairs extracted from the passage,\nand a framenet parse of the passage. Where the passage is from a non-English Wikipedia, a machine translation into English is also provided.", "### Data Fields\n\nThe detailed structure of an instance is as follows:\n\n\nEnglish language instances differ not in structure but in content; \n1. Fields in the block labeled \"MT\" above are naturally null (that is, they are set to falsy values in Python -- specifically 'None')\n2. Since the Wiki passage only exists in English, and has no corresponding non-English \"original language\" version, answer spans also necessarily have only an English-language version (and no non-English \"original-language\" version. 
Therefore, fields in the 'qa_pairs' block beginning with 'lang_' are set to null/falsy values in Python (in this case, empty lists).", "### Data Splits\n\nMegaWika is currently split only by language, as each task will imply its own approach to filtering, sampling, downselecting, and splitting into train/test splits.", "## Licensing and Takedown\n\nMegaWika 1.0 consists in part of documents scraped from across the web (based on citations linked in Wikipedia articles.)\n\nWe do not own any of the scraped text nor do we claim copyright: text drawn from Wikipedia citations are meant for research use in algorithmic design and model training.\n\nWe release this dataset and all its contents under CC-BY-SA-4.0.", "### Notice and Takedown Policy:\n*NB*: Should you consider that our data contains material that is owned by you and should therefore not be reproduced here, please:\n\n- Clearly identify yourself, with detailed contact data such as an address, telephone number or email address at which you can be contacted.\n- Clearly identify the copyrighted work claimed to be infringed.\n- Clearly identify the material that is claimed to be infringing and information reasonably sufficient to allow us to locate the material.\n\nAnd contact the authors.\n\n*Take down*: We will comply to legitimate requests by removing the affected sources from the next release of the dataset.", "## Additional Information", "### Dataset Curators\n\nReleased and maintained by the Johns Hopkins University Human Language Technology Center of Excellence (JHU/HLTCOE). \nYou can contact one the MegaWika authors, including Samuel Barham, Orion Weller,\nand Ben van Durme with questions.", "### Licensing Information\n\nReleased under the Attribution-ShareAlike 4.0 International (CC BY-SA 4.0) license." ]
[ 361, 8, 45, 136, 53, 44, 437, 65, 98, 152, 47, 90, 149, 5, 60, 24 ]
[ "passage: TAGS\n#task_categories-summarization #task_categories-question-answering #task_categories-text-generation #task_categories-text2text-generation #size_categories-10M<n<100M #language-Afrikaans #language-Arabic #language-Azerbaijani #language-Bengali #language-Czech #language-German #language-English #language-Spanish #language-Estonian #language-Persian #language-Finnish #language-French #language-Irish #language-Galician #language-Gujarati #language-Hebrew #language-Hindi #language-Croatian #language-Indonesian #language-Italian #language-Japanese #language-Georgian #language-Kazakh #language-Khmer #language-Korean #language-Lithuanian #language-Latvian #language-Macedonian #language-Malayalam #language-Mongolian #language-Marathi #language-Burmese #language-Nepali (macrolanguage) #language-Dutch #language-Polish #language-Pushto #language-Portuguese #language-Romanian #language-Russian #language-Sinhala #language-Slovenian #language-Swedish #language-Tamil #language-Thai #language-Turkish #language-Ukrainian #language-Urdu #language-Vietnamese #language-Xhosa #language-Chinese #license-cc-by-sa-4.0 #arxiv-2307.07049 #region-us \n# Dataset Card for MegaWika## Dataset Description\n\n- Homepage: HuggingFace\n- Repository: HuggingFace\n- Paper: [Coming soon]\n- Leaderboard: [Coming soon]\n- Point of Contact: Samuel Barham", "passage: ### Dataset Summary\n\nMegaWika is a multi- and crosslingual text dataset containing 30 million Wikipedia passages with their scraped and cleaned web citations. The passages span\n50 Wikipedias in 50 languages, and the articles in which the passages were originally embedded are included for convenience. Where a Wikipedia passage is in a\nnon-English language, an automated English translation is provided. Furthermore, nearly 130 million English question/answer pairs were extracted from the\npassages, and FrameNet events occurring in the passages are detected using the LOME FrameNet parser.### Dataset Creation\n\nThe pipeline through which MegaWika was created is complex, and is described in more detail in the paper (linked above),\nbut the following diagram illustrates the basic approach.\n\n!Illustration of MegaWikaProcess### Supported Tasks and Leaderboards\n\nMegaWika is meant to support research across a variety of tasks, including report generation, summarization, information retrieval, question answering, etc.", "passage: ### Languages\n\nMegaWika is divided by Wikipedia language. 
There are 50 languages, including English, each designated by its 2-character ISO language code:\n- 'af': Afrikaans\n- 'ar': Arabic\n- 'az': Azeri (Azerbaijani)\n- 'bn': Bengali\n- 'cs': Czech\n- 'de': German (Deutsch)\n- 'en': English\n- 'es': Spanish (Español)\n- 'et': Estonian\n- 'fa': Farsi (Persian)\n- 'fi': Finnish\n- 'fr': French\n- 'ga': Irish (Gaelic)\n- 'gl': Galician\n- 'gu': Gujarati\n- 'he': Hebrew\n- 'hi': Hindi\n- 'hr': Croatian\n- 'id': Indonesian\n- 'it': Italian\n- 'ja': Japanese\n- 'ka': Georgian (Kartvelian/Kartlian)\n- 'kk': Kazakh\n- 'km': Khmer\n- 'ko': Korean\n- 'lt': Lithuanian\n- 'lv': Latvian\n- 'mk': Macedonian (Makedonski)\n- 'ml': Malayalam\n- 'mn': Mongolian\n- 'mr': Marathi\n- 'my': Burmese (Myanmar language)\n- 'ne': Nepali\n- 'nl': Dutch (Nederlands)\n- 'pl': Polish\n- 'ps': Pashto\n- 'pt': Portuguese\n- 'ro': Romanian\n- 'ru': Russian\n- 'si': Sinhala (Sinhalese)\n- 'sl': Slovenian\n- 'sv': Swedish (Svenska)\n- 'ta': Tamil\n- 'th': Thai\n- 'tr': Turkish\n- 'uk': Ukrainian\n- 'ur': Urdu\n- 'vi': Vietnamese\n- 'xh': Xhosa\n- 'zh': Chinese (Zhōng wén)## Dataset Structure\n\nThe dataset is divided by language, and the data for each of the 50 languages is further chunked into discrete JSON lines files.\nEach line of these files -- we'll call such a line an instance -- contains the data extracted from a single Wikipedia article.### Data Instances\n\nEach instance contains the text of the seed Wikipedia article, along with a list of entries. Each entry consists basically of\nan extracted Wikipedia passage, the URL and scraped text of the web source it cites, a list of questions/answer pairs extracted from the passage,\nand a framenet parse of the passage. Where the passage is from a non-English Wikipedia, a machine translation into English is also provided.### Data Fields\n\nThe detailed structure of an instance is as follows:\n\n\nEnglish language instances differ not in structure but in content:\n1. Fields in the block labeled \"MT\" above are naturally null (that is, they are set to falsy values in Python -- specifically 'None')\n2. Since the Wiki passage only exists in English, and has no corresponding non-English \"original language\" version, answer spans also necessarily have only an English-language version (and no non-English \"original-language\" version). Therefore, fields in the 'qa_pairs' block beginning with 'lang_' are set to null/falsy values in Python (in this case, empty lists).### Data Splits\n\nMegaWika is currently split only by language, as each task will imply its own approach to filtering, sampling, downselecting, and splitting into train/test splits.## Licensing and Takedown\n\nMegaWika 1.0 consists in part of documents scraped from across the web (based on citations linked in Wikipedia articles).\n\nWe do not own any of the scraped text nor do we claim copyright: text drawn from Wikipedia citations is meant for research use in algorithmic design and model training.\n\nWe release this dataset and all its contents under CC-BY-SA-4.0." ]
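Given the per-language JSON-lines layout described in the MegaWika card above, a minimal loading sketch may be useful. This is a sketch under stated assumptions: the repository id (`hltcoe/megawika`), the use of the 2-character ISO code as the configuration name, and the `train` split name are not confirmed by this record.

```python
from datasets import load_dataset

# A minimal sketch, assuming the repo id "hltcoe/megawika", per-language
# configs named by ISO code, and a "train" split. Streaming avoids
# downloading every language shard at once.
megawika_fr = load_dataset("hltcoe/megawika", "fr", split="train", streaming=True)

# Each instance is one Wikipedia article; its entries carry the passage,
# the scraped citation text, extracted QA pairs, and a FrameNet parse.
first_article = next(iter(megawika_fr))
print(sorted(first_article.keys()))
```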
9c7913e1f1afe18616d511fbeeb9ffb4b994ffb6
# Dataset Card for Evaluation run of wei123602/Llama-2-13b-FINETUNE4_TEST2

## Dataset Description

- **Homepage:** 
- **Repository:** https://huggingface.co/wei123602/Llama-2-13b-FINETUNE4_TEST2
- **Paper:** 
- **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
- **Point of Contact:** [email protected]

### Dataset Summary

Dataset automatically created during the evaluation run of model [wei123602/Llama-2-13b-FINETUNE4_TEST2](https://huggingface.co/wei123602/Llama-2-13b-FINETUNE4_TEST2) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).

The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).

To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_wei123602__Llama-2-13b-FINETUNE4_TEST2",
	"harness_winogrande_5",
	split="train")
```

## Latest results

These are the [latest results from run 2023-10-25T21:06:32.496100](https://huggingface.co/datasets/open-llm-leaderboard/details_wei123602__Llama-2-13b-FINETUNE4_TEST2/blob/main/results_2023-10-25T21-06-32.496100.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval):

```python
{
    "all": {
        "em": 0.15016778523489932,
        "em_stderr": 0.0036584290259430103,
        "f1": 0.2005201342281873,
        "f1_stderr": 0.0036902547918246254,
        "acc": 0.449147694463709,
        "acc_stderr": 0.010606729865520519
    },
    "harness|drop|3": {
        "em": 0.15016778523489932,
        "em_stderr": 0.0036584290259430103,
        "f1": 0.2005201342281873,
        "f1_stderr": 0.0036902547918246254
    },
    "harness|gsm8k|5": {
        "acc": 0.13191811978771797,
        "acc_stderr": 0.009321265253857515
    },
    "harness|winogrande|5": {
        "acc": 0.7663772691397001,
        "acc_stderr": 0.011892194477183524
    }
}
```

### Supported Tasks and Leaderboards

[More Information Needed]

### Languages

[More Information Needed]

## Dataset Structure

### Data Instances

[More Information Needed]

### Data Fields

[More Information Needed]

### Data Splits

[More Information Needed]

## Dataset Creation

### Curation Rationale

[More Information Needed]

### Source Data

#### Initial Data Collection and Normalization

[More Information Needed]

#### Who are the source language producers?

[More Information Needed]

### Annotations

#### Annotation process

[More Information Needed]

#### Who are the annotators?

[More Information Needed]

### Personal and Sensitive Information

[More Information Needed]

## Considerations for Using the Data

### Social Impact of Dataset

[More Information Needed]

### Discussion of Biases

[More Information Needed]

### Other Known Limitations

[More Information Needed]

## Additional Information

### Dataset Curators

[More Information Needed]

### Licensing Information

[More Information Needed]

### Citation Information

[More Information Needed]

### Contributions

[More Information Needed]
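Beyond the single-configuration example in the card above, it can help to enumerate all configurations and pull the aggregated metrics. The sketch below relies only on names stated in this card and its metadata (the repo id, the "results" configuration, and the "latest" split); treat it as illustrative rather than an official API.

```python
from datasets import get_dataset_config_names, load_dataset

REPO = "open-llm-leaderboard/details_wei123602__Llama-2-13b-FINETUNE4_TEST2"

# Enumerate the 64 task configurations plus the aggregated "results" config.
configs = get_dataset_config_names(REPO)
print(len(configs), configs[:3])

# The "latest" split of "results" points at the most recent run's metrics.
results = load_dataset(REPO, "results", split="latest")
print(results[0])
```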
open-llm-leaderboard/details_wei123602__Llama-2-13b-FINETUNE4_TEST2
[ "region:us" ]
2023-10-03T14:37:02+00:00
{"pretty_name": "Evaluation run of wei123602/Llama-2-13b-FINETUNE4_TEST2", "dataset_summary": "Dataset automatically created during the evaluation run of model [wei123602/Llama-2-13b-FINETUNE4_TEST2](https://huggingface.co/wei123602/Llama-2-13b-FINETUNE4_TEST2) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_wei123602__Llama-2-13b-FINETUNE4_TEST2\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-10-25T21:06:32.496100](https://huggingface.co/datasets/open-llm-leaderboard/details_wei123602__Llama-2-13b-FINETUNE4_TEST2/blob/main/results_2023-10-25T21-06-32.496100.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.15016778523489932,\n \"em_stderr\": 0.0036584290259430103,\n \"f1\": 0.2005201342281873,\n \"f1_stderr\": 0.0036902547918246254,\n \"acc\": 0.449147694463709,\n \"acc_stderr\": 0.010606729865520519\n },\n \"harness|drop|3\": {\n \"em\": 0.15016778523489932,\n \"em_stderr\": 0.0036584290259430103,\n \"f1\": 0.2005201342281873,\n \"f1_stderr\": 0.0036902547918246254\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.13191811978771797,\n \"acc_stderr\": 0.009321265253857515\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7663772691397001,\n \"acc_stderr\": 0.011892194477183524\n }\n}\n```", "repo_url": "https://huggingface.co/wei123602/Llama-2-13b-FINETUNE4_TEST2", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_10_03T15_36_38.191985", "path": ["**/details_harness|arc:challenge|25_2023-10-03T15-36-38.191985.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-10-03T15-36-38.191985.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_10_25T21_06_32.496100", "path": ["**/details_harness|drop|3_2023-10-25T21-06-32.496100.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-10-25T21-06-32.496100.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_10_25T21_06_32.496100", "path": ["**/details_harness|gsm8k|5_2023-10-25T21-06-32.496100.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-10-25T21-06-32.496100.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_10_03T15_36_38.191985", "path": ["**/details_harness|hellaswag|10_2023-10-03T15-36-38.191985.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hellaswag|10_2023-10-03T15-36-38.191985.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_10_03T15_36_38.191985", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-03T15-36-38.191985.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-10-03T15-36-38.191985.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-10-03T15-36-38.191985.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-10-03T15-36-38.191985.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-03T15-36-38.191985.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-10-03T15-36-38.191985.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-10-03T15-36-38.191985.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-10-03T15-36-38.191985.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-10-03T15-36-38.191985.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-10-03T15-36-38.191985.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-10-03T15-36-38.191985.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-10-03T15-36-38.191985.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-03T15-36-38.191985.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-10-03T15-36-38.191985.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-03T15-36-38.191985.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-03T15-36-38.191985.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-10-03T15-36-38.191985.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-10-03T15-36-38.191985.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-10-03T15-36-38.191985.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-03T15-36-38.191985.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-03T15-36-38.191985.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-03T15-36-38.191985.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-10-03T15-36-38.191985.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-03T15-36-38.191985.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-03T15-36-38.191985.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-03T15-36-38.191985.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-03T15-36-38.191985.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-10-03T15-36-38.191985.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-03T15-36-38.191985.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-03T15-36-38.191985.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-03T15-36-38.191985.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-03T15-36-38.191985.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-10-03T15-36-38.191985.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-10-03T15-36-38.191985.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-10-03T15-36-38.191985.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-10-03T15-36-38.191985.parquet", 
"**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-03T15-36-38.191985.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-10-03T15-36-38.191985.parquet", "**/details_harness|hendrycksTest-management|5_2023-10-03T15-36-38.191985.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-10-03T15-36-38.191985.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-10-03T15-36-38.191985.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-10-03T15-36-38.191985.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-10-03T15-36-38.191985.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-03T15-36-38.191985.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-10-03T15-36-38.191985.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-10-03T15-36-38.191985.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-10-03T15-36-38.191985.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-10-03T15-36-38.191985.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-10-03T15-36-38.191985.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-10-03T15-36-38.191985.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-10-03T15-36-38.191985.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-10-03T15-36-38.191985.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-10-03T15-36-38.191985.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-10-03T15-36-38.191985.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-03T15-36-38.191985.parquet", "**/details_harness|hendrycksTest-virology|5_2023-10-03T15-36-38.191985.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-10-03T15-36-38.191985.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-03T15-36-38.191985.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-10-03T15-36-38.191985.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-10-03T15-36-38.191985.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-10-03T15-36-38.191985.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-03T15-36-38.191985.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-10-03T15-36-38.191985.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-10-03T15-36-38.191985.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-10-03T15-36-38.191985.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-10-03T15-36-38.191985.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-10-03T15-36-38.191985.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-10-03T15-36-38.191985.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-10-03T15-36-38.191985.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-03T15-36-38.191985.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-10-03T15-36-38.191985.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-03T15-36-38.191985.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-03T15-36-38.191985.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-10-03T15-36-38.191985.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-10-03T15-36-38.191985.parquet", 
"**/details_harness|hendrycksTest-high_school_biology|5_2023-10-03T15-36-38.191985.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-03T15-36-38.191985.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-03T15-36-38.191985.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-03T15-36-38.191985.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-10-03T15-36-38.191985.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-03T15-36-38.191985.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-03T15-36-38.191985.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-03T15-36-38.191985.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-03T15-36-38.191985.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-10-03T15-36-38.191985.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-03T15-36-38.191985.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-03T15-36-38.191985.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-03T15-36-38.191985.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-03T15-36-38.191985.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-10-03T15-36-38.191985.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-10-03T15-36-38.191985.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-10-03T15-36-38.191985.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-10-03T15-36-38.191985.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-03T15-36-38.191985.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-10-03T15-36-38.191985.parquet", "**/details_harness|hendrycksTest-management|5_2023-10-03T15-36-38.191985.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-10-03T15-36-38.191985.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-10-03T15-36-38.191985.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-10-03T15-36-38.191985.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-10-03T15-36-38.191985.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-03T15-36-38.191985.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-10-03T15-36-38.191985.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-10-03T15-36-38.191985.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-10-03T15-36-38.191985.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-10-03T15-36-38.191985.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-10-03T15-36-38.191985.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-10-03T15-36-38.191985.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-10-03T15-36-38.191985.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-10-03T15-36-38.191985.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-10-03T15-36-38.191985.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-10-03T15-36-38.191985.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-03T15-36-38.191985.parquet", "**/details_harness|hendrycksTest-virology|5_2023-10-03T15-36-38.191985.parquet", 
"**/details_harness|hendrycksTest-world_religions|5_2023-10-03T15-36-38.191985.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_10_03T15_36_38.191985", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-03T15-36-38.191985.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-03T15-36-38.191985.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_10_03T15_36_38.191985", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-10-03T15-36-38.191985.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-10-03T15-36-38.191985.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_10_03T15_36_38.191985", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-10-03T15-36-38.191985.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-10-03T15-36-38.191985.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_10_03T15_36_38.191985", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-10-03T15-36-38.191985.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-10-03T15-36-38.191985.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_10_03T15_36_38.191985", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-03T15-36-38.191985.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-03T15-36-38.191985.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_10_03T15_36_38.191985", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-10-03T15-36-38.191985.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-10-03T15-36-38.191985.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_10_03T15_36_38.191985", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-10-03T15-36-38.191985.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-10-03T15-36-38.191985.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_10_03T15_36_38.191985", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-10-03T15-36-38.191985.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-10-03T15-36-38.191985.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_10_03T15_36_38.191985", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-10-03T15-36-38.191985.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-10-03T15-36-38.191985.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_10_03T15_36_38.191985", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-10-03T15-36-38.191985.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-10-03T15-36-38.191985.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": 
"2023_10_03T15_36_38.191985", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-10-03T15-36-38.191985.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-10-03T15-36-38.191985.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_10_03T15_36_38.191985", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-10-03T15-36-38.191985.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-10-03T15-36-38.191985.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_10_03T15_36_38.191985", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-03T15-36-38.191985.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-03T15-36-38.191985.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_10_03T15_36_38.191985", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-10-03T15-36-38.191985.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-10-03T15-36-38.191985.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_10_03T15_36_38.191985", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-03T15-36-38.191985.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-03T15-36-38.191985.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_10_03T15_36_38.191985", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-03T15-36-38.191985.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-03T15-36-38.191985.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_10_03T15_36_38.191985", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-10-03T15-36-38.191985.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-10-03T15-36-38.191985.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_10_03T15_36_38.191985", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-10-03T15-36-38.191985.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-10-03T15-36-38.191985.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_10_03T15_36_38.191985", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-10-03T15-36-38.191985.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-10-03T15-36-38.191985.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_10_03T15_36_38.191985", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-03T15-36-38.191985.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-03T15-36-38.191985.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_10_03T15_36_38.191985", "path": 
["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-03T15-36-38.191985.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-03T15-36-38.191985.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_10_03T15_36_38.191985", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-03T15-36-38.191985.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-03T15-36-38.191985.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_10_03T15_36_38.191985", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-10-03T15-36-38.191985.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-10-03T15-36-38.191985.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_10_03T15_36_38.191985", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-03T15-36-38.191985.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-03T15-36-38.191985.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_10_03T15_36_38.191985", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-03T15-36-38.191985.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-03T15-36-38.191985.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_10_03T15_36_38.191985", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-03T15-36-38.191985.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-03T15-36-38.191985.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_10_03T15_36_38.191985", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-03T15-36-38.191985.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-03T15-36-38.191985.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_10_03T15_36_38.191985", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-10-03T15-36-38.191985.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-10-03T15-36-38.191985.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_10_03T15_36_38.191985", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-03T15-36-38.191985.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-03T15-36-38.191985.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_10_03T15_36_38.191985", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-03T15-36-38.191985.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-03T15-36-38.191985.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_10_03T15_36_38.191985", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-03T15-36-38.191985.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-03T15-36-38.191985.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_10_03T15_36_38.191985", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-03T15-36-38.191985.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-03T15-36-38.191985.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_10_03T15_36_38.191985", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-10-03T15-36-38.191985.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-10-03T15-36-38.191985.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_10_03T15_36_38.191985", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-10-03T15-36-38.191985.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-10-03T15-36-38.191985.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_10_03T15_36_38.191985", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-10-03T15-36-38.191985.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-10-03T15-36-38.191985.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_10_03T15_36_38.191985", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-10-03T15-36-38.191985.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-10-03T15-36-38.191985.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_10_03T15_36_38.191985", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-03T15-36-38.191985.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-03T15-36-38.191985.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_10_03T15_36_38.191985", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-10-03T15-36-38.191985.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-10-03T15-36-38.191985.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_10_03T15_36_38.191985", "path": ["**/details_harness|hendrycksTest-management|5_2023-10-03T15-36-38.191985.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-10-03T15-36-38.191985.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_10_03T15_36_38.191985", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-10-03T15-36-38.191985.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-10-03T15-36-38.191985.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_10_03T15_36_38.191985", "path": 
["**/details_harness|hendrycksTest-medical_genetics|5_2023-10-03T15-36-38.191985.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-10-03T15-36-38.191985.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_10_03T15_36_38.191985", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-10-03T15-36-38.191985.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-10-03T15-36-38.191985.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_10_03T15_36_38.191985", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-10-03T15-36-38.191985.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-10-03T15-36-38.191985.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_10_03T15_36_38.191985", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-03T15-36-38.191985.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-03T15-36-38.191985.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_10_03T15_36_38.191985", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-10-03T15-36-38.191985.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-10-03T15-36-38.191985.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_10_03T15_36_38.191985", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-10-03T15-36-38.191985.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-10-03T15-36-38.191985.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_10_03T15_36_38.191985", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-10-03T15-36-38.191985.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-10-03T15-36-38.191985.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_10_03T15_36_38.191985", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-10-03T15-36-38.191985.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-10-03T15-36-38.191985.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_10_03T15_36_38.191985", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-10-03T15-36-38.191985.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-10-03T15-36-38.191985.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_10_03T15_36_38.191985", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-10-03T15-36-38.191985.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-10-03T15-36-38.191985.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_10_03T15_36_38.191985", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-10-03T15-36-38.191985.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_psychology|5_2023-10-03T15-36-38.191985.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_10_03T15_36_38.191985", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-10-03T15-36-38.191985.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-10-03T15-36-38.191985.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_10_03T15_36_38.191985", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-10-03T15-36-38.191985.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-10-03T15-36-38.191985.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_10_03T15_36_38.191985", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-10-03T15-36-38.191985.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-10-03T15-36-38.191985.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_10_03T15_36_38.191985", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-03T15-36-38.191985.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-03T15-36-38.191985.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_10_03T15_36_38.191985", "path": ["**/details_harness|hendrycksTest-virology|5_2023-10-03T15-36-38.191985.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-10-03T15-36-38.191985.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_10_03T15_36_38.191985", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-10-03T15-36-38.191985.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-10-03T15-36-38.191985.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_10_03T15_36_38.191985", "path": ["**/details_harness|truthfulqa:mc|0_2023-10-03T15-36-38.191985.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-10-03T15-36-38.191985.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_10_25T21_06_32.496100", "path": ["**/details_harness|winogrande|5_2023-10-25T21-06-32.496100.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-10-25T21-06-32.496100.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_10_03T15_36_38.191985", "path": ["results_2023-10-03T15-36-38.191985.parquet"]}, {"split": "2023_10_25T21_06_32.496100", "path": ["results_2023-10-25T21-06-32.496100.parquet"]}, {"split": "latest", "path": ["results_2023-10-25T21-06-32.496100.parquet"]}]}]}
2023-10-25T20:06:45+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of wei123602/Llama-2-13b-FINETUNE4_TEST2 ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model wei123602/Llama-2-13b-FINETUNE4_TEST2 on the Open LLM Leaderboard. The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-10-25T21:06:32.496100 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
[ "# Dataset Card for Evaluation run of wei123602/Llama-2-13b-FINETUNE4_TEST2", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model wei123602/Llama-2-13b-FINETUNE4_TEST2 on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-25T21:06:32.496100(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of wei123602/Llama-2-13b-FINETUNE4_TEST2", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model wei123602/Llama-2-13b-FINETUNE4_TEST2 on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-25T21:06:32.496100(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 28, 31, 176, 66, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of wei123602/Llama-2-13b-FINETUNE4_TEST2## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model wei123602/Llama-2-13b-FINETUNE4_TEST2 on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-10-25T21:06:32.496100(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
e91427b71913537bbbaf9fdaeefc26d1b4facc9a
# Dataset Card for DIALOGSum Corpus

## Dataset Description

### Links

- **Homepage:** https://aclanthology.org/2021.findings-acl.449
- **Repository:** https://github.com/cylnlp/dialogsum
- **Paper:** https://aclanthology.org/2021.findings-acl.449
- **Point of Contact:** https://huggingface.co/knkarthick

### Dataset Summary

DialogSum is a large-scale dialogue summarization dataset, consisting of 13,460 (Plus 100 holdout data for topic generation) dialogues with corresponding manually labeled summaries and topics.

### Languages

English

## Dataset Structure

### Data Instances

DialogSum is a large-scale dialogue summarization dataset, consisting of 13,460 dialogues (+1000 tests) split into train, test and validation.

The first instance in the training set:

{'id': 'train_0', 'summary': "Mr. Smith's getting a check-up, and Doctor Hawkins advises him to have one every year. Hawkins'll give some information about their classes and medications to help Mr. Smith quit smoking.", 'dialogue': "#Person1#: Hi, Mr. Smith. I'm Doctor Hawkins. Why are you here today?\n#Person2#: I found it would be a good idea to get a check-up.\n#Person1#: Yes, well, you haven't had one for 5 years. You should have one every year.\n#Person2#: I know. I figure as long as there is nothing wrong, why go see the doctor?\n#Person1#: Well, the best way to avoid serious illnesses is to find out about them early. So try to come at least once a year for your own good.\n#Person2#: Ok.\n#Person1#: Let me see here. Your eyes and ears look fine. Take a deep breath, please. Do you smoke, Mr. Smith?\n#Person2#: Yes.\n#Person1#: Smoking is the leading cause of lung cancer and heart disease, you know. You really should quit.\n#Person2#: I've tried hundreds of times, but I just can't seem to kick the habit.\n#Person1#: Well, we have classes and some medications that might help. I'll give you more information before you leave.\n#Person2#: Ok, thanks doctor.", 'topic': "get a check-up"}

### Data Fields

- dialogue: text of dialogue.
- summary: human written summary of the dialogue.
- topic: human written topic/one liner of the dialogue.
- id: unique file id of an example.

### Data Splits

- train: 12460
- val: 500
- test: 1500
- holdout: 100 [Only 3 features: id, dialogue, topic]

## Dataset Creation

### Curation Rationale

In paper: We collect dialogue data for DialogSum from three public dialogue corpora, namely Dailydialog (Li et al., 2017), DREAM (Sun et al., 2019) and MuTual (Cui et al., 2019), as well as an English speaking practice website. These datasets contain face-to-face spoken dialogues that cover a wide range of daily-life topics, including schooling, work, medication, shopping, leisure, travel. Most conversations take place between friends, colleagues, and between service providers and customers.

Compared with previous datasets, dialogues from DialogSum have distinct characteristics: Under rich real-life scenarios, including more diverse task-oriented scenarios; Have clear communication patterns and intents, which is valuable to serve as summarization sources; Have a reasonable length, which suits the purpose of automatic summarization.

We ask annotators to summarize each dialogue based on the following criteria: Convey the most salient information; Be brief; Preserve important named entities within the conversation; Be written from an observer perspective; Be written in formal language.

### Who are the source language producers?

linguists

### Who are the annotators? 
language experts

## Licensing Information

CC BY-NC-SA 4.0

## Citation Information

```
@inproceedings{chen-etal-2021-dialogsum,
    title = "{D}ialog{S}um: {A} Real-Life Scenario Dialogue Summarization Dataset",
    author = "Chen, Yulong and Liu, Yang and Chen, Liang and Zhang, Yue",
    booktitle = "Findings of the Association for Computational Linguistics: ACL-IJCNLP 2021",
    month = aug,
    year = "2021",
    address = "Online",
    publisher = "Association for Computational Linguistics",
    url = "https://aclanthology.org/2021.findings-acl.449",
    doi = "10.18653/v1/2021.findings-acl.449",
    pages = "5062--5074",
}
```

## Contributions

Thanks to [@cylnlp](https://github.com/cylnlp) for adding this dataset.
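Given the fields (id, dialogue, summary, topic) and splits documented above, a minimal inspection sketch follows. The repo id `knkarthick/dialogsum` is an assumption based on the card's point of contact; substitute this mirror's id if loading from here instead.

```python
from datasets import load_dataset

# A minimal sketch; "knkarthick/dialogsum" is an assumed repo id taken
# from the card's point of contact -- swap in this mirror's id if needed.
dialogsum = load_dataset("knkarthick/dialogsum")

# Expect roughly train: 12460, validation: 500, test: 1500 examples.
print({split: ds.num_rows for split, ds in dialogsum.items()})

sample = dialogsum["train"][0]
print(sample["topic"])    # human-written one-liner topic
print(sample["summary"])  # human-written summary of the dialogue
```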
PericlesSavio/resumo
[ "task_categories:summarization", "task_categories:text2text-generation", "task_categories:text-generation", "annotations_creators:expert-generated", "language_creators:expert-generated", "multilinguality:monolingual", "size_categories:10K<n<100K", "source_datasets:original", "language:en", "license:cc-by-nc-sa-4.0", "dialogue-summary", "one-liner-summary", "meeting-title", "email-subject", "region:us" ]
2023-10-03T14:41:08+00:00
{"annotations_creators": ["expert-generated"], "language_creators": ["expert-generated"], "language": ["en"], "license": "cc-by-nc-sa-4.0", "multilinguality": ["monolingual"], "size_categories": ["10K<n<100K"], "source_datasets": ["original"], "task_categories": ["summarization", "text2text-generation", "text-generation"], "task_ids": [], "pretty_name": "DIALOGSum Corpus", "tags": ["dialogue-summary", "one-liner-summary", "meeting-title", "email-subject"]}
2023-10-03T16:47:52+00:00
[]
[ "en" ]
TAGS #task_categories-summarization #task_categories-text2text-generation #task_categories-text-generation #annotations_creators-expert-generated #language_creators-expert-generated #multilinguality-monolingual #size_categories-10K<n<100K #source_datasets-original #language-English #license-cc-by-nc-sa-4.0 #dialogue-summary #one-liner-summary #meeting-title #email-subject #region-us
# Dataset Card for DIALOGSum Corpus ## Dataset Description ### Links - Homepage: URL - Repository: URL - Paper: URL - Point of Contact: URL ### Dataset Summary DialogSum is a large-scale dialogue summarization dataset, consisting of 13,460 (Plus 100 holdout data for topic generation) dialogues with corresponding manually labeled summaries and topics. ### Languages English ## Dataset Structure ### Data Instances DialogSum is a large-scale dialogue summarization dataset, consisting of 13,460 dialogues (+1000 tests) split into train, test and validation. The first instance in the training set: {'id': 'train_0', 'summary': "Mr. Smith's getting a check-up, and Doctor Hawkins advises him to have one every year. Hawkins'll give some information about their classes and medications to help Mr. Smith quit smoking.", 'dialogue': "#Person1#: Hi, Mr. Smith. I'm Doctor Hawkins. Why are you here today?\n#Person2#: I found it would be a good idea to get a check-up.\n#Person1#: Yes, well, you haven't had one for 5 years. You should have one every year.\n#Person2#: I know. I figure as long as there is nothing wrong, why go see the doctor?\n#Person1#: Well, the best way to avoid serious illnesses is to find out about them early. So try to come at least once a year for your own good.\n#Person2#: Ok.\n#Person1#: Let me see here. Your eyes and ears look fine. Take a deep breath, please. Do you smoke, Mr. Smith?\n#Person2#: Yes.\n#Person1#: Smoking is the leading cause of lung cancer and heart disease, you know. You really should quit.\n#Person2#: I've tried hundreds of times, but I just can't seem to kick the habit.\n#Person1#: Well, we have classes and some medications that might help. I'll give you more information before you leave.\n#Person2#: Ok, thanks doctor.", 'topic': "get a check-up"} ### Data Fields - dialogue: text of dialogue. - summary: human written summary of the dialogue. - topic: human written topic/one liner of the dialogue. - id: unique file id of an example. ### Data Splits - train: 12460 - val: 500 - test: 1500 - holdout: 100 [Only 3 features: id, dialogue, topic] ## Dataset Creation ### Curation Rationale In paper: We collect dialogue data for DialogSum from three public dialogue corpora, namely Dailydialog (Li et al., 2017), DREAM (Sun et al., 2019) and MuTual (Cui et al., 2019), as well as an English speaking practice website. These datasets contain face-to-face spoken dialogues that cover a wide range of daily-life topics, including schooling, work, medication, shopping, leisure, travel. Most conversations take place between friends, colleagues, and between service providers and customers. Compared with previous datasets, dialogues from DialogSum have distinct characteristics: Under rich real-life scenarios, including more diverse task-oriented scenarios; Have clear communication patterns and intents, which is valuable to serve as summarization sources; Have a reasonable length, which suits the purpose of automatic summarization. We ask annotators to summarize each dialogue based on the following criteria: Convey the most salient information; Be brief; Preserve important named entities within the conversation; Be written from an observer perspective; Be written in formal language. ### Who are the source language producers? linguists ### Who are the annotators? language experts ## Licensing Information CC BY-NC-SA 4.0 ## Contributions Thanks to @cylnlp for adding this dataset.
[ "# Dataset Card for DIALOGSum Corpus", "## Dataset Description", "### Links\n- Homepage: URL\n- Repository: URL\n- Paper: URL\n- Point of Contact: URL", "### Dataset Summary\nDialogSum is a large-scale dialogue summarization dataset, consisting of 13,460 (Plus 100 holdout data for topic generation) dialogues with corresponding manually labeled summaries and topics.", "### Languages\nEnglish", "## Dataset Structure", "### Data Instances\nDialogSum is a large-scale dialogue summarization dataset, consisting of 13,460 dialogues (+1000 tests) split into train, test and validation.\nThe first instance in the training set:\n{'id': 'train_0', 'summary': \"Mr. Smith's getting a check-up, and Doctor Hawkins advises him to have one every year. Hawkins'll give some information about their classes and medications to help Mr. Smith quit smoking.\", 'dialogue': \"#Person1#: Hi, Mr. Smith. I'm Doctor Hawkins. Why are you here today?\\n#Person2#: I found it would be a good idea to get a check-up.\\n#Person1#: Yes, well, you haven't had one for 5 years. You should have one every year.\\n#Person2#: I know. I figure as long as there is nothing wrong, why go see the doctor?\\n#Person1#: Well, the best way to avoid serious illnesses is to find out about them early. So try to come at least once a year for your own good.\\n#Person2#: Ok.\\n#Person1#: Let me see here. Your eyes and ears look fine. Take a deep breath, please. Do you smoke, Mr. Smith?\\n#Person2#: Yes.\\n#Person1#: Smoking is the leading cause of lung cancer and heart disease, you know. You really should quit.\\n#Person2#: I've tried hundreds of times, but I just can't seem to kick the habit.\\n#Person1#: Well, we have classes and some medications that might help. I'll give you more information before you leave.\\n#Person2#: Ok, thanks doctor.\", 'topic': \"get a check-up}", "### Data Fields\n- dialogue: text of dialogue.\n- summary: human written summary of the dialogue.\n- topic: human written topic/one liner of the dialogue.\n- id: unique file id of an example.", "### Data Splits\n- train: 12460\n- val: 500\n- test: 1500\n- holdout: 100 [Only 3 features: id, dialogue, topic]", "## Dataset Creation", "### Curation Rationale\nIn paper:\nWe collect dialogue data for DialogSum from three public dialogue corpora, namely Dailydialog (Li et al., 2017), DREAM (Sun et al., 2019) and MuTual (Cui et al., 2019), as well as an English speaking practice website. These datasets contain face-to-face spoken dialogues that cover a wide range of daily-life topics, including schooling, work, medication, shopping, leisure, travel. Most conversations take place between friends, colleagues, and between service providers and customers.\n\nCompared with previous datasets, dialogues from DialogSum have distinct characteristics:\n\nUnder rich real-life scenarios, including more diverse task-oriented scenarios;\nHave clear communication patterns and intents, which is valuable to serve as summarization sources;\nHave a reasonable length, which comforts the purpose of automatic summarization.\n\nWe ask annotators to summarize each dialogue based on the following criteria:\nConvey the most salient information;\nBe brief;\nPreserve important named entities within the conversation;\nBe written from an observer perspective;\nBe written in formal language.", "### Who are the source language producers?\nlinguists", "### Who are the annotators?\nlanguage experts", "## Licensing Information\nCC BY-NC-SA 4.0", "## Contributions\nThanks to @cylnlp for adding this dataset." ]
[ "TAGS\n#task_categories-summarization #task_categories-text2text-generation #task_categories-text-generation #annotations_creators-expert-generated #language_creators-expert-generated #multilinguality-monolingual #size_categories-10K<n<100K #source_datasets-original #language-English #license-cc-by-nc-sa-4.0 #dialogue-summary #one-liner-summary #meeting-title #email-subject #region-us \n", "# Dataset Card for DIALOGSum Corpus", "## Dataset Description", "### Links\n- Homepage: URL\n- Repository: URL\n- Paper: URL\n- Point of Contact: URL", "### Dataset Summary\nDialogSum is a large-scale dialogue summarization dataset, consisting of 13,460 (Plus 100 holdout data for topic generation) dialogues with corresponding manually labeled summaries and topics.", "### Languages\nEnglish", "## Dataset Structure", "### Data Instances\nDialogSum is a large-scale dialogue summarization dataset, consisting of 13,460 dialogues (+1000 tests) split into train, test and validation.\nThe first instance in the training set:\n{'id': 'train_0', 'summary': \"Mr. Smith's getting a check-up, and Doctor Hawkins advises him to have one every year. Hawkins'll give some information about their classes and medications to help Mr. Smith quit smoking.\", 'dialogue': \"#Person1#: Hi, Mr. Smith. I'm Doctor Hawkins. Why are you here today?\\n#Person2#: I found it would be a good idea to get a check-up.\\n#Person1#: Yes, well, you haven't had one for 5 years. You should have one every year.\\n#Person2#: I know. I figure as long as there is nothing wrong, why go see the doctor?\\n#Person1#: Well, the best way to avoid serious illnesses is to find out about them early. So try to come at least once a year for your own good.\\n#Person2#: Ok.\\n#Person1#: Let me see here. Your eyes and ears look fine. Take a deep breath, please. Do you smoke, Mr. Smith?\\n#Person2#: Yes.\\n#Person1#: Smoking is the leading cause of lung cancer and heart disease, you know. You really should quit.\\n#Person2#: I've tried hundreds of times, but I just can't seem to kick the habit.\\n#Person1#: Well, we have classes and some medications that might help. I'll give you more information before you leave.\\n#Person2#: Ok, thanks doctor.\", 'topic': \"get a check-up}", "### Data Fields\n- dialogue: text of dialogue.\n- summary: human written summary of the dialogue.\n- topic: human written topic/one liner of the dialogue.\n- id: unique file id of an example.", "### Data Splits\n- train: 12460\n- val: 500\n- test: 1500\n- holdout: 100 [Only 3 features: id, dialogue, topic]", "## Dataset Creation", "### Curation Rationale\nIn paper:\nWe collect dialogue data for DialogSum from three public dialogue corpora, namely Dailydialog (Li et al., 2017), DREAM (Sun et al., 2019) and MuTual (Cui et al., 2019), as well as an English speaking practice website. These datasets contain face-to-face spoken dialogues that cover a wide range of daily-life topics, including schooling, work, medication, shopping, leisure, travel. 
Most conversations take place between friends, colleagues, and between service providers and customers.\n\nCompared with previous datasets, dialogues from DialogSum have distinct characteristics:\n\nUnder rich real-life scenarios, including more diverse task-oriented scenarios;\nHave clear communication patterns and intents, which is valuable to serve as summarization sources;\nHave a reasonable length, which comforts the purpose of automatic summarization.\n\nWe ask annotators to summarize each dialogue based on the following criteria:\nConvey the most salient information;\nBe brief;\nPreserve important named entities within the conversation;\nBe written from an observer perspective;\nBe written in formal language.", "### Who are the source language producers?\nlinguists", "### Who are the annotators?\nlanguage experts", "## Licensing Information\nCC BY-NC-SA 4.0", "## Contributions\nThanks to @cylnlp for adding this dataset." ]
[ 133, 10, 4, 23, 52, 5, 6, 428, 46, 35, 5, 256, 12, 11, 12, 17 ]
[ "passage: TAGS\n#task_categories-summarization #task_categories-text2text-generation #task_categories-text-generation #annotations_creators-expert-generated #language_creators-expert-generated #multilinguality-monolingual #size_categories-10K<n<100K #source_datasets-original #language-English #license-cc-by-nc-sa-4.0 #dialogue-summary #one-liner-summary #meeting-title #email-subject #region-us \n# Dataset Card for DIALOGSum Corpus## Dataset Description### Links\n- Homepage: URL\n- Repository: URL\n- Paper: URL\n- Point of Contact: URL### Dataset Summary\nDialogSum is a large-scale dialogue summarization dataset, consisting of 13,460 (Plus 100 holdout data for topic generation) dialogues with corresponding manually labeled summaries and topics.### Languages\nEnglish## Dataset Structure" ]
cfd7e899a0137de91f1620b709a509977223f79e
# Dataset Card for "giant-midi-masked-v3" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
roszcz/giant-midi-masked-v3
[ "region:us" ]
2023-10-03T15:25:29+00:00
{"dataset_info": {"features": [{"name": "pitch", "sequence": "int8", "length": 90}, {"name": "start", "sequence": "float64", "length": 90}, {"name": "dstart", "sequence": "float64", "length": 90}, {"name": "end", "sequence": "float64", "length": 90}, {"name": "duration", "sequence": "float64", "length": 90}, {"name": "velocity", "sequence": "int8", "length": 90}, {"name": "source", "dtype": "string"}, {"name": "masking_space", "struct": [{"name": "<Random Mask>", "sequence": "bool", "length": 90}, {"name": "<LH Mask>", "sequence": "bool", "length": 90}, {"name": "<RH Mask>", "sequence": "bool", "length": 90}, {"name": "<Harmonic Root Mask>", "sequence": "bool", "length": 90}, {"name": "<Harmonic Outliers Mask>", "sequence": "bool", "length": 90}]}], "splits": [{"name": "train", "num_bytes": 24181696800, "num_examples": 7140520}], "download_size": 23770439021, "dataset_size": 24181696800}}
2023-10-03T17:34:23+00:00
[]
[]
TAGS #region-us
# Dataset Card for "giant-midi-masked-v3" More Information needed
[ "# Dataset Card for \"giant-midi-masked-v3\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"giant-midi-masked-v3\"\n\nMore Information needed" ]
[ 6, 20 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"giant-midi-masked-v3\"\n\nMore Information needed" ]
788482f630bcf2db6d60c0471c1ebafc3df86b92
# Bangumi Image Base of Katanagatari This is the image base of bangumi Katanagatari; we detected 22 characters and 2116 images in total. The full dataset is [here](all.zip). **Please note that these image bases are not guaranteed to be 100% cleaned; they may still contain noise.** If you intend to manually train models using this dataset, we recommend performing the necessary preprocessing on the downloaded dataset to eliminate potential noisy samples (approximately 1% probability). Here is the characters' preview: | # | Images | Download | Preview 1 | Preview 2 | Preview 3 | Preview 4 | Preview 5 | Preview 6 | Preview 7 | Preview 8 | |:------|---------:|:---------------------------|:-------------------------------|:-------------------------------|:-------------------------------|:-------------------------------|:-------------------------------|:-------------------------------|:-------------------------------|:-------------------------------| | 0 | 89 | [Download](0/dataset.zip) | ![preview 1](0/preview_1.png) | ![preview 2](0/preview_2.png) | ![preview 3](0/preview_3.png) | ![preview 4](0/preview_4.png) | ![preview 5](0/preview_5.png) | ![preview 6](0/preview_6.png) | ![preview 7](0/preview_7.png) | ![preview 8](0/preview_8.png) | | 1 | 32 | [Download](1/dataset.zip) | ![preview 1](1/preview_1.png) | ![preview 2](1/preview_2.png) | ![preview 3](1/preview_3.png) | ![preview 4](1/preview_4.png) | ![preview 5](1/preview_5.png) | ![preview 6](1/preview_6.png) | ![preview 7](1/preview_7.png) | ![preview 8](1/preview_8.png) | | 2 | 32 | [Download](2/dataset.zip) | ![preview 1](2/preview_1.png) | ![preview 2](2/preview_2.png) | ![preview 3](2/preview_3.png) | ![preview 4](2/preview_4.png) | ![preview 5](2/preview_5.png) | ![preview 6](2/preview_6.png) | ![preview 7](2/preview_7.png) | ![preview 8](2/preview_8.png) | | 3 | 62 | [Download](3/dataset.zip) | ![preview 1](3/preview_1.png) | ![preview 2](3/preview_2.png) | ![preview 3](3/preview_3.png) | ![preview 4](3/preview_4.png) | ![preview 5](3/preview_5.png) | ![preview 6](3/preview_6.png) | ![preview 7](3/preview_7.png) | ![preview 8](3/preview_8.png) | | 4 | 17 | [Download](4/dataset.zip) | ![preview 1](4/preview_1.png) | ![preview 2](4/preview_2.png) | ![preview 3](4/preview_3.png) | ![preview 4](4/preview_4.png) | ![preview 5](4/preview_5.png) | ![preview 6](4/preview_6.png) | ![preview 7](4/preview_7.png) | ![preview 8](4/preview_8.png) | | 5 | 13 | [Download](5/dataset.zip) | ![preview 1](5/preview_1.png) | ![preview 2](5/preview_2.png) | ![preview 3](5/preview_3.png) | ![preview 4](5/preview_4.png) | ![preview 5](5/preview_5.png) | ![preview 6](5/preview_6.png) | ![preview 7](5/preview_7.png) | ![preview 8](5/preview_8.png) | | 6 | 15 | [Download](6/dataset.zip) | ![preview 1](6/preview_1.png) | ![preview 2](6/preview_2.png) | ![preview 3](6/preview_3.png) | ![preview 4](6/preview_4.png) | ![preview 5](6/preview_5.png) | ![preview 6](6/preview_6.png) | ![preview 7](6/preview_7.png) | ![preview 8](6/preview_8.png) | | 7 | 21 | [Download](7/dataset.zip) | ![preview 1](7/preview_1.png) | ![preview 2](7/preview_2.png) | ![preview 3](7/preview_3.png) | ![preview 4](7/preview_4.png) | ![preview 5](7/preview_5.png) | ![preview 6](7/preview_6.png) | ![preview 7](7/preview_7.png) | ![preview 8](7/preview_8.png) | | 8 | 9 | [Download](8/dataset.zip) | ![preview 1](8/preview_1.png) | ![preview 2](8/preview_2.png) | ![preview 3](8/preview_3.png) | ![preview 4](8/preview_4.png) | ![preview 5](8/preview_5.png) | ![preview 6](8/preview_6.png) | ![preview 
7](8/preview_7.png) | ![preview 8](8/preview_8.png) | | 9 | 791 | [Download](9/dataset.zip) | ![preview 1](9/preview_1.png) | ![preview 2](9/preview_2.png) | ![preview 3](9/preview_3.png) | ![preview 4](9/preview_4.png) | ![preview 5](9/preview_5.png) | ![preview 6](9/preview_6.png) | ![preview 7](9/preview_7.png) | ![preview 8](9/preview_8.png) | | 10 | 60 | [Download](10/dataset.zip) | ![preview 1](10/preview_1.png) | ![preview 2](10/preview_2.png) | ![preview 3](10/preview_3.png) | ![preview 4](10/preview_4.png) | ![preview 5](10/preview_5.png) | ![preview 6](10/preview_6.png) | ![preview 7](10/preview_7.png) | ![preview 8](10/preview_8.png) | | 11 | 21 | [Download](11/dataset.zip) | ![preview 1](11/preview_1.png) | ![preview 2](11/preview_2.png) | ![preview 3](11/preview_3.png) | ![preview 4](11/preview_4.png) | ![preview 5](11/preview_5.png) | ![preview 6](11/preview_6.png) | ![preview 7](11/preview_7.png) | ![preview 8](11/preview_8.png) | | 12 | 19 | [Download](12/dataset.zip) | ![preview 1](12/preview_1.png) | ![preview 2](12/preview_2.png) | ![preview 3](12/preview_3.png) | ![preview 4](12/preview_4.png) | ![preview 5](12/preview_5.png) | ![preview 6](12/preview_6.png) | ![preview 7](12/preview_7.png) | ![preview 8](12/preview_8.png) | | 13 | 586 | [Download](13/dataset.zip) | ![preview 1](13/preview_1.png) | ![preview 2](13/preview_2.png) | ![preview 3](13/preview_3.png) | ![preview 4](13/preview_4.png) | ![preview 5](13/preview_5.png) | ![preview 6](13/preview_6.png) | ![preview 7](13/preview_7.png) | ![preview 8](13/preview_8.png) | | 14 | 54 | [Download](14/dataset.zip) | ![preview 1](14/preview_1.png) | ![preview 2](14/preview_2.png) | ![preview 3](14/preview_3.png) | ![preview 4](14/preview_4.png) | ![preview 5](14/preview_5.png) | ![preview 6](14/preview_6.png) | ![preview 7](14/preview_7.png) | ![preview 8](14/preview_8.png) | | 15 | 24 | [Download](15/dataset.zip) | ![preview 1](15/preview_1.png) | ![preview 2](15/preview_2.png) | ![preview 3](15/preview_3.png) | ![preview 4](15/preview_4.png) | ![preview 5](15/preview_5.png) | ![preview 6](15/preview_6.png) | ![preview 7](15/preview_7.png) | ![preview 8](15/preview_8.png) | | 16 | 19 | [Download](16/dataset.zip) | ![preview 1](16/preview_1.png) | ![preview 2](16/preview_2.png) | ![preview 3](16/preview_3.png) | ![preview 4](16/preview_4.png) | ![preview 5](16/preview_5.png) | ![preview 6](16/preview_6.png) | ![preview 7](16/preview_7.png) | ![preview 8](16/preview_8.png) | | 17 | 7 | [Download](17/dataset.zip) | ![preview 1](17/preview_1.png) | ![preview 2](17/preview_2.png) | ![preview 3](17/preview_3.png) | ![preview 4](17/preview_4.png) | ![preview 5](17/preview_5.png) | ![preview 6](17/preview_6.png) | ![preview 7](17/preview_7.png) | N/A | | 18 | 18 | [Download](18/dataset.zip) | ![preview 1](18/preview_1.png) | ![preview 2](18/preview_2.png) | ![preview 3](18/preview_3.png) | ![preview 4](18/preview_4.png) | ![preview 5](18/preview_5.png) | ![preview 6](18/preview_6.png) | ![preview 7](18/preview_7.png) | ![preview 8](18/preview_8.png) | | 19 | 8 | [Download](19/dataset.zip) | ![preview 1](19/preview_1.png) | ![preview 2](19/preview_2.png) | ![preview 3](19/preview_3.png) | ![preview 4](19/preview_4.png) | ![preview 5](19/preview_5.png) | ![preview 6](19/preview_6.png) | ![preview 7](19/preview_7.png) | ![preview 8](19/preview_8.png) | | 20 | 64 | [Download](20/dataset.zip) | ![preview 1](20/preview_1.png) | ![preview 2](20/preview_2.png) | ![preview 3](20/preview_3.png) | ![preview 4](20/preview_4.png) | ![preview 
5](20/preview_5.png) | ![preview 6](20/preview_6.png) | ![preview 7](20/preview_7.png) | ![preview 8](20/preview_8.png) | | noise | 155 | [Download](-1/dataset.zip) | ![preview 1](-1/preview_1.png) | ![preview 2](-1/preview_2.png) | ![preview 3](-1/preview_3.png) | ![preview 4](-1/preview_4.png) | ![preview 5](-1/preview_5.png) | ![preview 6](-1/preview_6.png) | ![preview 7](-1/preview_7.png) | ![preview 8](-1/preview_8.png) |
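Since each character row above links to a per-character `dataset.zip`, one plausible way to fetch a single archive programmatically is via `huggingface_hub` (the repo id matches this record; the archive path comes from the table above):

```python
from huggingface_hub import hf_hub_download
import zipfile

# Fetch the archive for character #9 (791 images per the table above).
path = hf_hub_download(
    repo_id="BangumiBase/katanagatari",
    filename="9/dataset.zip",
    repo_type="dataset",
)
with zipfile.ZipFile(path) as zf:
    zf.extractall("katanagatari_char_9")
```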
BangumiBase/katanagatari
[ "size_categories:1K<n<10K", "license:mit", "art", "region:us" ]
2023-10-03T15:29:15+00:00
{"license": "mit", "size_categories": ["1K<n<10K"], "tags": ["art"]}
2023-10-03T16:55:57+00:00
[]
[]
TAGS #size_categories-1K<n<10K #license-mit #art #region-us
Bangumi Image Base of Katanagatari ================================== This is the image base of bangumi Katanagatari; we detected 22 characters and 2116 images in total. The full dataset is here. Please note that these image bases are not guaranteed to be 100% cleaned; they may still contain noise. If you intend to manually train models using this dataset, we recommend performing the necessary preprocessing on the downloaded dataset to eliminate potential noisy samples (approximately 1% probability). Here is the characters' preview:
[]
[ "TAGS\n#size_categories-1K<n<10K #license-mit #art #region-us \n" ]
[ 25 ]
[ "passage: TAGS\n#size_categories-1K<n<10K #license-mit #art #region-us \n" ]
1ab856a50d34fe91d1511bbc3eef8d76210dbd70
# Dataset Card for Evaluation run of Undi95/ReMM-v2.2-L2-13B ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/Undi95/ReMM-v2.2-L2-13B - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [Undi95/ReMM-v2.2-L2-13B](https://huggingface.co/Undi95/ReMM-v2.2-L2-13B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 64 configurations, each corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_Undi95__ReMM-v2.2-L2-13B", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-10-23T13:54:57.235808](https://huggingface.co/datasets/open-llm-leaderboard/details_Undi95__ReMM-v2.2-L2-13B/blob/main/results_2023-10-23T13-54-57.235808.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "em": 0.037751677852348994, "em_stderr": 0.0019518721243716466, "f1": 0.10559354026845587, "f1_stderr": 0.00235422441511938, "acc": 0.4481872912020479, "acc_stderr": 0.010817015780168433 }, "harness|drop|3": { "em": 0.037751677852348994, "em_stderr": 0.0019518721243716466, "f1": 0.10559354026845587, "f1_stderr": 0.00235422441511938 }, "harness|gsm8k|5": { "acc": 0.14025777103866566, "acc_stderr": 0.009565108281428673 }, "harness|winogrande|5": { "acc": 0.7561168113654302, "acc_stderr": 0.012068923278908192 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
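Beyond the per-task details shown above, the aggregated metrics can be pulled from the "results" configuration declared in this record's config list; its "latest" split points at the newest run:

```python
from datasets import load_dataset

# Aggregated metrics for the most recent evaluation run of this model.
results = load_dataset(
    "open-llm-leaderboard/details_Undi95__ReMM-v2.2-L2-13B",
    "results",
    split="latest",
)
print(results[0])
```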
open-llm-leaderboard/details_Undi95__ReMM-v2.2-L2-13B
[ "region:us" ]
2023-10-03T15:45:45+00:00
{"pretty_name": "Evaluation run of Undi95/ReMM-v2.2-L2-13B", "dataset_summary": "Dataset automatically created during the evaluation run of model [Undi95/ReMM-v2.2-L2-13B](https://huggingface.co/Undi95/ReMM-v2.2-L2-13B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_Undi95__ReMM-v2.2-L2-13B\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-10-23T13:54:57.235808](https://huggingface.co/datasets/open-llm-leaderboard/details_Undi95__ReMM-v2.2-L2-13B/blob/main/results_2023-10-23T13-54-57.235808.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.037751677852348994,\n \"em_stderr\": 0.0019518721243716466,\n \"f1\": 0.10559354026845587,\n \"f1_stderr\": 0.00235422441511938,\n \"acc\": 0.4481872912020479,\n \"acc_stderr\": 0.010817015780168433\n },\n \"harness|drop|3\": {\n \"em\": 0.037751677852348994,\n \"em_stderr\": 0.0019518721243716466,\n \"f1\": 0.10559354026845587,\n \"f1_stderr\": 0.00235422441511938\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.14025777103866566,\n \"acc_stderr\": 0.009565108281428673\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7561168113654302,\n \"acc_stderr\": 0.012068923278908192\n }\n}\n```", "repo_url": "https://huggingface.co/Undi95/ReMM-v2.2-L2-13B", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_10_03T16_45_21.105610", "path": ["**/details_harness|arc:challenge|25_2023-10-03T16-45-21.105610.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-10-03T16-45-21.105610.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_10_23T13_54_57.235808", "path": ["**/details_harness|drop|3_2023-10-23T13-54-57.235808.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-10-23T13-54-57.235808.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_10_23T13_54_57.235808", "path": ["**/details_harness|gsm8k|5_2023-10-23T13-54-57.235808.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-10-23T13-54-57.235808.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_10_03T16_45_21.105610", "path": ["**/details_harness|hellaswag|10_2023-10-03T16-45-21.105610.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-10-03T16-45-21.105610.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": 
"2023_10_03T16_45_21.105610", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-03T16-45-21.105610.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-10-03T16-45-21.105610.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-10-03T16-45-21.105610.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-10-03T16-45-21.105610.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-03T16-45-21.105610.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-10-03T16-45-21.105610.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-10-03T16-45-21.105610.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-10-03T16-45-21.105610.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-10-03T16-45-21.105610.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-10-03T16-45-21.105610.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-10-03T16-45-21.105610.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-10-03T16-45-21.105610.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-03T16-45-21.105610.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-10-03T16-45-21.105610.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-03T16-45-21.105610.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-03T16-45-21.105610.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-10-03T16-45-21.105610.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-10-03T16-45-21.105610.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-10-03T16-45-21.105610.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-03T16-45-21.105610.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-03T16-45-21.105610.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-03T16-45-21.105610.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-10-03T16-45-21.105610.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-03T16-45-21.105610.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-03T16-45-21.105610.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-03T16-45-21.105610.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-03T16-45-21.105610.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-10-03T16-45-21.105610.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-03T16-45-21.105610.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-03T16-45-21.105610.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-03T16-45-21.105610.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-03T16-45-21.105610.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-10-03T16-45-21.105610.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-10-03T16-45-21.105610.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-10-03T16-45-21.105610.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-10-03T16-45-21.105610.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-03T16-45-21.105610.parquet", 
"**/details_harness|hendrycksTest-machine_learning|5_2023-10-03T16-45-21.105610.parquet", "**/details_harness|hendrycksTest-management|5_2023-10-03T16-45-21.105610.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-10-03T16-45-21.105610.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-10-03T16-45-21.105610.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-10-03T16-45-21.105610.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-10-03T16-45-21.105610.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-03T16-45-21.105610.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-10-03T16-45-21.105610.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-10-03T16-45-21.105610.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-10-03T16-45-21.105610.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-10-03T16-45-21.105610.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-10-03T16-45-21.105610.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-10-03T16-45-21.105610.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-10-03T16-45-21.105610.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-10-03T16-45-21.105610.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-10-03T16-45-21.105610.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-10-03T16-45-21.105610.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-03T16-45-21.105610.parquet", "**/details_harness|hendrycksTest-virology|5_2023-10-03T16-45-21.105610.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-10-03T16-45-21.105610.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-03T16-45-21.105610.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-10-03T16-45-21.105610.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-10-03T16-45-21.105610.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-10-03T16-45-21.105610.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-03T16-45-21.105610.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-10-03T16-45-21.105610.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-10-03T16-45-21.105610.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-10-03T16-45-21.105610.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-10-03T16-45-21.105610.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-10-03T16-45-21.105610.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-10-03T16-45-21.105610.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-10-03T16-45-21.105610.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-03T16-45-21.105610.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-10-03T16-45-21.105610.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-03T16-45-21.105610.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-03T16-45-21.105610.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-10-03T16-45-21.105610.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-10-03T16-45-21.105610.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-10-03T16-45-21.105610.parquet", 
"**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-03T16-45-21.105610.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-03T16-45-21.105610.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-03T16-45-21.105610.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-10-03T16-45-21.105610.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-03T16-45-21.105610.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-03T16-45-21.105610.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-03T16-45-21.105610.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-03T16-45-21.105610.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-10-03T16-45-21.105610.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-03T16-45-21.105610.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-03T16-45-21.105610.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-03T16-45-21.105610.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-03T16-45-21.105610.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-10-03T16-45-21.105610.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-10-03T16-45-21.105610.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-10-03T16-45-21.105610.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-10-03T16-45-21.105610.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-03T16-45-21.105610.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-10-03T16-45-21.105610.parquet", "**/details_harness|hendrycksTest-management|5_2023-10-03T16-45-21.105610.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-10-03T16-45-21.105610.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-10-03T16-45-21.105610.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-10-03T16-45-21.105610.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-10-03T16-45-21.105610.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-03T16-45-21.105610.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-10-03T16-45-21.105610.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-10-03T16-45-21.105610.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-10-03T16-45-21.105610.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-10-03T16-45-21.105610.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-10-03T16-45-21.105610.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-10-03T16-45-21.105610.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-10-03T16-45-21.105610.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-10-03T16-45-21.105610.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-10-03T16-45-21.105610.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-10-03T16-45-21.105610.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-03T16-45-21.105610.parquet", "**/details_harness|hendrycksTest-virology|5_2023-10-03T16-45-21.105610.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-10-03T16-45-21.105610.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", 
"data_files": [{"split": "2023_10_03T16_45_21.105610", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-03T16-45-21.105610.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-03T16-45-21.105610.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_10_03T16_45_21.105610", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-10-03T16-45-21.105610.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-10-03T16-45-21.105610.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_10_03T16_45_21.105610", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-10-03T16-45-21.105610.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-10-03T16-45-21.105610.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_10_03T16_45_21.105610", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-10-03T16-45-21.105610.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-10-03T16-45-21.105610.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_10_03T16_45_21.105610", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-03T16-45-21.105610.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-03T16-45-21.105610.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_10_03T16_45_21.105610", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-10-03T16-45-21.105610.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-10-03T16-45-21.105610.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_10_03T16_45_21.105610", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-10-03T16-45-21.105610.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-10-03T16-45-21.105610.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_10_03T16_45_21.105610", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-10-03T16-45-21.105610.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-10-03T16-45-21.105610.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_10_03T16_45_21.105610", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-10-03T16-45-21.105610.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-10-03T16-45-21.105610.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_10_03T16_45_21.105610", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-10-03T16-45-21.105610.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-10-03T16-45-21.105610.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_10_03T16_45_21.105610", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-10-03T16-45-21.105610.parquet"]}, {"split": "latest", 
"path": ["**/details_harness|hendrycksTest-college_physics|5_2023-10-03T16-45-21.105610.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_10_03T16_45_21.105610", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-10-03T16-45-21.105610.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-10-03T16-45-21.105610.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_10_03T16_45_21.105610", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-03T16-45-21.105610.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-03T16-45-21.105610.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_10_03T16_45_21.105610", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-10-03T16-45-21.105610.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-10-03T16-45-21.105610.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_10_03T16_45_21.105610", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-03T16-45-21.105610.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-03T16-45-21.105610.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_10_03T16_45_21.105610", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-03T16-45-21.105610.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-03T16-45-21.105610.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_10_03T16_45_21.105610", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-10-03T16-45-21.105610.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-10-03T16-45-21.105610.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_10_03T16_45_21.105610", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-10-03T16-45-21.105610.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-10-03T16-45-21.105610.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_10_03T16_45_21.105610", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-10-03T16-45-21.105610.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-10-03T16-45-21.105610.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_10_03T16_45_21.105610", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-03T16-45-21.105610.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-03T16-45-21.105610.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_10_03T16_45_21.105610", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-03T16-45-21.105610.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-03T16-45-21.105610.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_10_03T16_45_21.105610", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-03T16-45-21.105610.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-03T16-45-21.105610.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_10_03T16_45_21.105610", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-10-03T16-45-21.105610.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-10-03T16-45-21.105610.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_10_03T16_45_21.105610", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-03T16-45-21.105610.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-03T16-45-21.105610.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_10_03T16_45_21.105610", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-03T16-45-21.105610.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-03T16-45-21.105610.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_10_03T16_45_21.105610", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-03T16-45-21.105610.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-03T16-45-21.105610.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_10_03T16_45_21.105610", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-03T16-45-21.105610.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-03T16-45-21.105610.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_10_03T16_45_21.105610", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-10-03T16-45-21.105610.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-10-03T16-45-21.105610.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_10_03T16_45_21.105610", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-03T16-45-21.105610.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-03T16-45-21.105610.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_10_03T16_45_21.105610", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-03T16-45-21.105610.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-03T16-45-21.105610.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_10_03T16_45_21.105610", "path": 
["**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-03T16-45-21.105610.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-03T16-45-21.105610.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_10_03T16_45_21.105610", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-03T16-45-21.105610.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-03T16-45-21.105610.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_10_03T16_45_21.105610", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-10-03T16-45-21.105610.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-10-03T16-45-21.105610.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_10_03T16_45_21.105610", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-10-03T16-45-21.105610.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-10-03T16-45-21.105610.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_10_03T16_45_21.105610", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-10-03T16-45-21.105610.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-10-03T16-45-21.105610.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_10_03T16_45_21.105610", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-10-03T16-45-21.105610.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-10-03T16-45-21.105610.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_10_03T16_45_21.105610", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-03T16-45-21.105610.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-03T16-45-21.105610.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_10_03T16_45_21.105610", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-10-03T16-45-21.105610.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-10-03T16-45-21.105610.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_10_03T16_45_21.105610", "path": ["**/details_harness|hendrycksTest-management|5_2023-10-03T16-45-21.105610.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-10-03T16-45-21.105610.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_10_03T16_45_21.105610", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-10-03T16-45-21.105610.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-10-03T16-45-21.105610.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_10_03T16_45_21.105610", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-10-03T16-45-21.105610.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-medical_genetics|5_2023-10-03T16-45-21.105610.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_10_03T16_45_21.105610", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-10-03T16-45-21.105610.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-10-03T16-45-21.105610.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_10_03T16_45_21.105610", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-10-03T16-45-21.105610.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-10-03T16-45-21.105610.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_10_03T16_45_21.105610", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-03T16-45-21.105610.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-03T16-45-21.105610.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_10_03T16_45_21.105610", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-10-03T16-45-21.105610.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-10-03T16-45-21.105610.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_10_03T16_45_21.105610", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-10-03T16-45-21.105610.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-10-03T16-45-21.105610.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_10_03T16_45_21.105610", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-10-03T16-45-21.105610.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-10-03T16-45-21.105610.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_10_03T16_45_21.105610", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-10-03T16-45-21.105610.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-10-03T16-45-21.105610.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_10_03T16_45_21.105610", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-10-03T16-45-21.105610.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-10-03T16-45-21.105610.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_10_03T16_45_21.105610", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-10-03T16-45-21.105610.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-10-03T16-45-21.105610.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_10_03T16_45_21.105610", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-10-03T16-45-21.105610.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-10-03T16-45-21.105610.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": 
"2023_10_03T16_45_21.105610", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-10-03T16-45-21.105610.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-10-03T16-45-21.105610.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_10_03T16_45_21.105610", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-10-03T16-45-21.105610.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-10-03T16-45-21.105610.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_10_03T16_45_21.105610", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-10-03T16-45-21.105610.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-10-03T16-45-21.105610.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_10_03T16_45_21.105610", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-03T16-45-21.105610.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-03T16-45-21.105610.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_10_03T16_45_21.105610", "path": ["**/details_harness|hendrycksTest-virology|5_2023-10-03T16-45-21.105610.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-10-03T16-45-21.105610.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_10_03T16_45_21.105610", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-10-03T16-45-21.105610.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-10-03T16-45-21.105610.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_10_03T16_45_21.105610", "path": ["**/details_harness|truthfulqa:mc|0_2023-10-03T16-45-21.105610.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-10-03T16-45-21.105610.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_10_23T13_54_57.235808", "path": ["**/details_harness|winogrande|5_2023-10-23T13-54-57.235808.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-10-23T13-54-57.235808.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_10_03T16_45_21.105610", "path": ["results_2023-10-03T16-45-21.105610.parquet"]}, {"split": "2023_10_23T13_54_57.235808", "path": ["results_2023-10-23T13-54-57.235808.parquet"]}, {"split": "latest", "path": ["results_2023-10-23T13-54-57.235808.parquet"]}]}]}
2023-10-23T12:55:09+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of Undi95/ReMM-v2.2-L2-13B ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model Undi95/ReMM-v2.2-L2-13B on the Open LLM Leaderboard. The dataset is composed of 64 configurations, each corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-10-23T13:54:57.235808 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
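The loading call referenced above, as given in the full card text for this record:

```python
from datasets import load_dataset

data = load_dataset(
    "open-llm-leaderboard/details_Undi95__ReMM-v2.2-L2-13B",
    "harness_winogrande_5",
    split="train",
)
```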
[ "# Dataset Card for Evaluation run of Undi95/ReMM-v2.2-L2-13B", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model Undi95/ReMM-v2.2-L2-13B on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-23T13:54:57.235808(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of Undi95/ReMM-v2.2-L2-13B", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model Undi95/ReMM-v2.2-L2-13B on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-23T13:54:57.235808(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 23, 31, 171, 66, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of Undi95/ReMM-v2.2-L2-13B## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model Undi95/ReMM-v2.2-L2-13B on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-10-23T13:54:57.235808(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
ea78b09d1c5026c78694bb61083d692cbf8e8281
# Dataset Card for Evaluation run of TheBloke/BigTranslate-13B-GPTQ

## Dataset Description

- **Homepage:**
- **Repository:** https://huggingface.co/TheBloke/BigTranslate-13B-GPTQ
- **Paper:**
- **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
- **Point of Contact:** [email protected]

### Dataset Summary

Dataset automatically created during the evaluation run of model [TheBloke/BigTranslate-13B-GPTQ](https://huggingface.co/TheBloke/BigTranslate-13B-GPTQ) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).

The dataset is composed of 3 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).

To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_TheBloke__BigTranslate-13B-GPTQ_public",
	"harness_winogrande_5",
	split="train")
```

## Latest results

These are the [latest results from run 2023-11-07T08:36:33.722457](https://huggingface.co/datasets/open-llm-leaderboard/details_TheBloke__BigTranslate-13B-GPTQ_public/blob/main/results_2023-11-07T08-36-33.722457.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each one in the results and the "latest" split for each eval):

```python
{
    "all": {
        "em": 0.1751258389261745,
        "em_stderr": 0.0038923206966426068,
        "f1": 0.22030725671140952,
        "f1_stderr": 0.003963217826978471,
        "acc": 0.35477505919494867,
        "acc_stderr": 0.0063794067240323
    },
    "harness|drop|3": {
        "em": 0.1751258389261745,
        "em_stderr": 0.0038923206966426068,
        "f1": 0.22030725671140952,
        "f1_stderr": 0.003963217826978471
    },
    "harness|gsm8k|5": {
        "acc": 0.0,
        "acc_stderr": 0.0
    },
    "harness|winogrande|5": {
        "acc": 0.7095501183898973,
        "acc_stderr": 0.0127588134480646
    }
}
```

### Supported Tasks and Leaderboards

[More Information Needed]

### Languages

[More Information Needed]

## Dataset Structure

### Data Instances

[More Information Needed]

### Data Fields

[More Information Needed]

### Data Splits

[More Information Needed]

## Dataset Creation

### Curation Rationale

[More Information Needed]

### Source Data

#### Initial Data Collection and Normalization

[More Information Needed]

#### Who are the source language producers?

[More Information Needed]

### Annotations

#### Annotation process

[More Information Needed]

#### Who are the annotators?

[More Information Needed]

### Personal and Sensitive Information

[More Information Needed]

## Considerations for Using the Data

### Social Impact of Dataset

[More Information Needed]

### Discussion of Biases

[More Information Needed]

### Other Known Limitations

[More Information Needed]

## Additional Information

### Dataset Curators

[More Information Needed]

### Licensing Information

[More Information Needed]

### Citation Information

[More Information Needed]

### Contributions

[More Information Needed]
open-llm-leaderboard/details_TheBloke__BigTranslate-13B-GPTQ
[ "region:us" ]
2023-10-03T15:50:02+00:00
{"pretty_name": "Evaluation run of TheBloke/BigTranslate-13B-GPTQ", "dataset_summary": "Dataset automatically created during the evaluation run of model [TheBloke/BigTranslate-13B-GPTQ](https://huggingface.co/TheBloke/BigTranslate-13B-GPTQ) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 3 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_TheBloke__BigTranslate-13B-GPTQ_public\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-11-07T08:36:33.722457](https://huggingface.co/datasets/open-llm-leaderboard/details_TheBloke__BigTranslate-13B-GPTQ_public/blob/main/results_2023-11-07T08-36-33.722457.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.1751258389261745,\n \"em_stderr\": 0.0038923206966426068,\n \"f1\": 0.22030725671140952,\n \"f1_stderr\": 0.003963217826978471,\n \"acc\": 0.35477505919494867,\n \"acc_stderr\": 0.0063794067240323\n },\n \"harness|drop|3\": {\n \"em\": 0.1751258389261745,\n \"em_stderr\": 0.0038923206966426068,\n \"f1\": 0.22030725671140952,\n \"f1_stderr\": 0.003963217826978471\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.0,\n \"acc_stderr\": 0.0\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7095501183898973,\n \"acc_stderr\": 0.0127588134480646\n }\n}\n```", "repo_url": "https://huggingface.co/TheBloke/BigTranslate-13B-GPTQ", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_drop_3", "data_files": [{"split": "2023_11_05T05_22_44.746843", "path": ["**/details_harness|drop|3_2023-11-05T05-22-44.746843.parquet"]}, {"split": "2023_11_07T08_36_33.722457", "path": ["**/details_harness|drop|3_2023-11-07T08-36-33.722457.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-11-07T08-36-33.722457.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_11_05T05_22_44.746843", "path": ["**/details_harness|gsm8k|5_2023-11-05T05-22-44.746843.parquet"]}, {"split": "2023_11_07T08_36_33.722457", "path": ["**/details_harness|gsm8k|5_2023-11-07T08-36-33.722457.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-11-07T08-36-33.722457.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_11_05T05_22_44.746843", "path": ["**/details_harness|winogrande|5_2023-11-05T05-22-44.746843.parquet"]}, {"split": "2023_11_07T08_36_33.722457", "path": ["**/details_harness|winogrande|5_2023-11-07T08-36-33.722457.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-11-07T08-36-33.722457.parquet"]}]}, 
{"config_name": "results", "data_files": [{"split": "2023_11_05T05_22_44.746843", "path": ["results_2023-11-05T05-22-44.746843.parquet"]}, {"split": "2023_11_07T08_36_33.722457", "path": ["results_2023-11-07T08-36-33.722457.parquet"]}, {"split": "latest", "path": ["results_2023-11-07T08-36-33.722457.parquet"]}]}]}
2023-12-01T14:19:51+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of TheBloke/BigTranslate-13B-GPTQ ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model TheBloke/BigTranslate-13B-GPTQ on the Open LLM Leaderboard. The dataset is composed of 3 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-11-07T08:36:33.722457 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each one in the results and the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
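The code block was stripped from this text rendering; the full card earlier in this record spells it out, so the snippet below simply restores it:

```python
from datasets import load_dataset

# "train" always points at the latest results, per the card text above.
data = load_dataset(
    "open-llm-leaderboard/details_TheBloke__BigTranslate-13B-GPTQ_public",
    "harness_winogrande_5",
    split="train",
)
```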
[ "# Dataset Card for Evaluation run of TheBloke/BigTranslate-13B-GPTQ", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model TheBloke/BigTranslate-13B-GPTQ on the Open LLM Leaderboard.\n\nThe dataset is composed of 3 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-11-07T08:36:33.722457(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of TheBloke/BigTranslate-13B-GPTQ", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model TheBloke/BigTranslate-13B-GPTQ on the Open LLM Leaderboard.\n\nThe dataset is composed of 3 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-11-07T08:36:33.722457(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 22, 31, 171, 67, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of TheBloke/BigTranslate-13B-GPTQ## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model TheBloke/BigTranslate-13B-GPTQ on the Open LLM Leaderboard.\n\nThe dataset is composed of 3 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-11-07T08:36:33.722457(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
292aaae99cbecacad50f692d7327887f05dacaf2
## Textbooks Are All You Need

Leveraging Large Language Models (LLMs), there's an opportunity to create a comprehensive open-source repository reminiscent of the historic Library of Alexandria.

This initiative represents a preliminary attempt at producing high-quality books covering an extensive range of subjects. The source of these samples varies:

- Some were generated using the RAG model, referencing Wikipedia or other search data.
- Some are completely synthetically generated.
- Some were created using GPT-3.5 and others with GPT-4.

### Generation:
- **[Textbook Quality](https://github.com/VikParuchuri/textbook_quality)**: 1391 samples & ~48M tokens of serp RAG programming texts
- **[SciPhi](https://github.com/emrgnt-cmplxty/SciPhi)**: 300 samples & ~38M tokens of wikipedia RAG + full synthetic general textbooks

For a comprehensive view, explore our collection on GitHub: **[Library of Phi](https://github.com/emrgnt-cmplxty/library_of_phi)**.

---
open-phi/textbooks
[ "region:us" ]
2023-10-03T15:55:38+00:00
{"dataset_info": {"features": [{"name": "topic", "dtype": "string"}, {"name": "model", "dtype": "string"}, {"name": "concepts", "dtype": "string"}, {"name": "outline", "dtype": "string"}, {"name": "markdown", "dtype": "string"}, {"name": "field", "dtype": "string"}, {"name": "subfield", "dtype": "string"}, {"name": "rag", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 397014633, "num_examples": 1795}], "download_size": 134557403, "dataset_size": 397014633}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]}
2023-10-08T04:07:09+00:00
[]
[]
TAGS #region-us
## Textbooks Are All You Need Leveraging Large Language Models (LLMs), there's an opportunity to create a comprehensive open-source repository reminiscent of the historic Library of Alexandria. This initiative represents a preliminary attempt at producing high-quality books covering an extensive range of subjects. The source of these samples varies: - Some were generated using the RAG model, referencing Wikipedia or other search data. - Some are completely synthetically generated. - Some were created using GPT-3.5 and others with GPT-4. ### Generation: - Textbook Quality: 1391 samples & ~48M tokens of serp RAG programming texts - SciPhi: 300 samples & ~38M tokens of wikipedia RAG + full synthetic general textbooks For a comprehensive view, explore our collection on GitHub: Library of Phi. ---
[ "## Textbooks Are All You Need\n\nLeveraging Large Language Models (LLMs), there's an opportunity to create a comprehensive open-source repository reminiscent of the historic Library of Alexandria.\n\nThis initiative represents a preliminary attempt at producing high-quality books covering an extensive range of subjects. The source of these samples varies:\n\n- Some generated using the RAG model, referencing Wikipedia or other search data.\n- Some are completely synthetically generated.\n- Some created using GPT-3.5 and others with GPT-4.", "### Generation:\n- Textbook Quality: 1391 samples & ~48M tokens of serp RAG programming texts\n- SciPhi: 300 samples & ~38M tokens of wikipedia RAG + full synthetic general textbooks\n\nFor a comprehensive view, explore our collection on GitHub: Library of Phi.\n\n---" ]
[ "TAGS\n#region-us \n", "## Textbooks Are All You Need\n\nLeveraging Large Language Models (LLMs), there's an opportunity to create a comprehensive open-source repository reminiscent of the historic Library of Alexandria.\n\nThis initiative represents a preliminary attempt at producing high-quality books covering an extensive range of subjects. The source of these samples varies:\n\n- Some generated using the RAG model, referencing Wikipedia or other search data.\n- Some are completely synthetically generated.\n- Some created using GPT-3.5 and others with GPT-4.", "### Generation:\n- Textbook Quality: 1391 samples & ~48M tokens of serp RAG programming texts\n- SciPhi: 300 samples & ~38M tokens of wikipedia RAG + full synthetic general textbooks\n\nFor a comprehensive view, explore our collection on GitHub: Library of Phi.\n\n---" ]
[ 6, 122, 73 ]
[ "passage: TAGS\n#region-us \n## Textbooks Are All You Need\n\nLeveraging Large Language Models (LLMs), there's an opportunity to create a comprehensive open-source repository reminiscent of the historic Library of Alexandria.\n\nThis initiative represents a preliminary attempt at producing high-quality books covering an extensive range of subjects. The source of these samples varies:\n\n- Some generated using the RAG model, referencing Wikipedia or other search data.\n- Some are completely synthetically generated.\n- Some created using GPT-3.5 and others with GPT-4.### Generation:\n- Textbook Quality: 1391 samples & ~48M tokens of serp RAG programming texts\n- SciPhi: 300 samples & ~38M tokens of wikipedia RAG + full synthetic general textbooks\n\nFor a comprehensive view, explore our collection on GitHub: Library of Phi.\n\n---" ]
70b4f3b739c128272b4ba64fbf6314e9b32e360c
# Dataset Card for Evaluation run of dhmeltzer/Llama-2-13b-hf-ds_wiki_1024_full_r_64_alpha_16_merged

## Dataset Description

- **Homepage:**
- **Repository:** https://huggingface.co/dhmeltzer/Llama-2-13b-hf-ds_wiki_1024_full_r_64_alpha_16_merged
- **Paper:**
- **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
- **Point of Contact:** [email protected]

### Dataset Summary

Dataset automatically created during the evaluation run of model [dhmeltzer/Llama-2-13b-hf-ds_wiki_1024_full_r_64_alpha_16_merged](https://huggingface.co/dhmeltzer/Llama-2-13b-hf-ds_wiki_1024_full_r_64_alpha_16_merged) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).

The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).

To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_dhmeltzer__Llama-2-13b-hf-ds_wiki_1024_full_r_64_alpha_16_merged",
	"harness_winogrande_5",
	split="train")
```

## Latest results

These are the [latest results from run 2023-10-23T13:20:24.503429](https://huggingface.co/datasets/open-llm-leaderboard/details_dhmeltzer__Llama-2-13b-hf-ds_wiki_1024_full_r_64_alpha_16_merged/blob/main/results_2023-10-23T13-20-24.503429.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each one in the results and the "latest" split for each eval):

```python
{
    "all": {
        "em": 0.002307046979865772,
        "em_stderr": 0.0004913221265094568,
        "f1": 0.06630557885906033,
        "f1_stderr": 0.0014292163093715619,
        "acc": 0.4319026216914706,
        "acc_stderr": 0.010283324097975763
    },
    "harness|drop|3": {
        "em": 0.002307046979865772,
        "em_stderr": 0.0004913221265094568,
        "f1": 0.06630557885906033,
        "f1_stderr": 0.0014292163093715619
    },
    "harness|gsm8k|5": {
        "acc": 0.1068991660348749,
        "acc_stderr": 0.008510982565520481
    },
    "harness|winogrande|5": {
        "acc": 0.7569060773480663,
        "acc_stderr": 0.012055665630431043
    }
}
```

### Supported Tasks and Leaderboards

[More Information Needed]

### Languages

[More Information Needed]

## Dataset Structure

### Data Instances

[More Information Needed]

### Data Fields

[More Information Needed]

### Data Splits

[More Information Needed]

## Dataset Creation

### Curation Rationale

[More Information Needed]

### Source Data

#### Initial Data Collection and Normalization

[More Information Needed]

#### Who are the source language producers?

[More Information Needed]

### Annotations

#### Annotation process

[More Information Needed]

#### Who are the annotators?

[More Information Needed]

### Personal and Sensitive Information

[More Information Needed]

## Considerations for Using the Data

### Social Impact of Dataset

[More Information Needed]

### Discussion of Biases

[More Information Needed]

### Other Known Limitations

[More Information Needed]

## Additional Information

### Dataset Curators

[More Information Needed]

### Licensing Information

[More Information Needed]

### Citation Information

[More Information Needed]

### Contributions

[More Information Needed]
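For the aggregated numbers shown under "Latest results", the "results" configuration can be loaded directly. A short sketch, assuming the `results` config and `latest` split declared in this record's metadata:

```python
from datasets import load_dataset

# The "results" config stores the aggregated results of every run;
# the "latest" split points at the newest one.
results = load_dataset(
    "open-llm-leaderboard/details_dhmeltzer__Llama-2-13b-hf-ds_wiki_1024_full_r_64_alpha_16_merged",
    "results",
    split="latest",
)
```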
open-llm-leaderboard/details_dhmeltzer__Llama-2-13b-hf-ds_wiki_1024_full_r_64_alpha_16_merged
[ "region:us" ]
2023-10-03T16:04:23+00:00
{"pretty_name": "Evaluation run of dhmeltzer/Llama-2-13b-hf-ds_wiki_1024_full_r_64_alpha_16_merged", "dataset_summary": "Dataset automatically created during the evaluation run of model [dhmeltzer/Llama-2-13b-hf-ds_wiki_1024_full_r_64_alpha_16_merged](https://huggingface.co/dhmeltzer/Llama-2-13b-hf-ds_wiki_1024_full_r_64_alpha_16_merged) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_dhmeltzer__Llama-2-13b-hf-ds_wiki_1024_full_r_64_alpha_16_merged\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-10-23T13:20:24.503429](https://huggingface.co/datasets/open-llm-leaderboard/details_dhmeltzer__Llama-2-13b-hf-ds_wiki_1024_full_r_64_alpha_16_merged/blob/main/results_2023-10-23T13-20-24.503429.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.002307046979865772,\n \"em_stderr\": 0.0004913221265094568,\n \"f1\": 0.06630557885906033,\n \"f1_stderr\": 0.0014292163093715619,\n \"acc\": 0.4319026216914706,\n \"acc_stderr\": 0.010283324097975763\n },\n \"harness|drop|3\": {\n \"em\": 0.002307046979865772,\n \"em_stderr\": 0.0004913221265094568,\n \"f1\": 0.06630557885906033,\n \"f1_stderr\": 0.0014292163093715619\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.1068991660348749,\n \"acc_stderr\": 0.008510982565520481\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7569060773480663,\n \"acc_stderr\": 0.012055665630431043\n }\n}\n```", "repo_url": "https://huggingface.co/dhmeltzer/Llama-2-13b-hf-ds_wiki_1024_full_r_64_alpha_16_merged", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_10_03T17_03_59.314428", "path": ["**/details_harness|arc:challenge|25_2023-10-03T17-03-59.314428.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-10-03T17-03-59.314428.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_10_23T13_20_24.503429", "path": ["**/details_harness|drop|3_2023-10-23T13-20-24.503429.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-10-23T13-20-24.503429.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_10_23T13_20_24.503429", "path": ["**/details_harness|gsm8k|5_2023-10-23T13-20-24.503429.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-10-23T13-20-24.503429.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_10_03T17_03_59.314428", "path": 
["**/details_harness|hellaswag|10_2023-10-03T17-03-59.314428.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-10-03T17-03-59.314428.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_10_03T17_03_59.314428", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-03T17-03-59.314428.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-10-03T17-03-59.314428.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-10-03T17-03-59.314428.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-10-03T17-03-59.314428.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-03T17-03-59.314428.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-10-03T17-03-59.314428.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-10-03T17-03-59.314428.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-10-03T17-03-59.314428.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-10-03T17-03-59.314428.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-10-03T17-03-59.314428.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-10-03T17-03-59.314428.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-10-03T17-03-59.314428.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-03T17-03-59.314428.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-10-03T17-03-59.314428.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-03T17-03-59.314428.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-03T17-03-59.314428.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-10-03T17-03-59.314428.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-10-03T17-03-59.314428.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-10-03T17-03-59.314428.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-03T17-03-59.314428.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-03T17-03-59.314428.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-03T17-03-59.314428.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-10-03T17-03-59.314428.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-03T17-03-59.314428.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-03T17-03-59.314428.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-03T17-03-59.314428.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-03T17-03-59.314428.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-10-03T17-03-59.314428.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-03T17-03-59.314428.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-03T17-03-59.314428.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-03T17-03-59.314428.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-03T17-03-59.314428.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-10-03T17-03-59.314428.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-10-03T17-03-59.314428.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-10-03T17-03-59.314428.parquet", 
"**/details_harness|hendrycksTest-jurisprudence|5_2023-10-03T17-03-59.314428.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-03T17-03-59.314428.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-10-03T17-03-59.314428.parquet", "**/details_harness|hendrycksTest-management|5_2023-10-03T17-03-59.314428.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-10-03T17-03-59.314428.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-10-03T17-03-59.314428.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-10-03T17-03-59.314428.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-10-03T17-03-59.314428.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-03T17-03-59.314428.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-10-03T17-03-59.314428.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-10-03T17-03-59.314428.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-10-03T17-03-59.314428.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-10-03T17-03-59.314428.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-10-03T17-03-59.314428.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-10-03T17-03-59.314428.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-10-03T17-03-59.314428.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-10-03T17-03-59.314428.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-10-03T17-03-59.314428.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-10-03T17-03-59.314428.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-03T17-03-59.314428.parquet", "**/details_harness|hendrycksTest-virology|5_2023-10-03T17-03-59.314428.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-10-03T17-03-59.314428.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-03T17-03-59.314428.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-10-03T17-03-59.314428.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-10-03T17-03-59.314428.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-10-03T17-03-59.314428.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-03T17-03-59.314428.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-10-03T17-03-59.314428.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-10-03T17-03-59.314428.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-10-03T17-03-59.314428.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-10-03T17-03-59.314428.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-10-03T17-03-59.314428.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-10-03T17-03-59.314428.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-10-03T17-03-59.314428.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-03T17-03-59.314428.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-10-03T17-03-59.314428.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-03T17-03-59.314428.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-03T17-03-59.314428.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-10-03T17-03-59.314428.parquet", 
"**/details_harness|hendrycksTest-global_facts|5_2023-10-03T17-03-59.314428.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-10-03T17-03-59.314428.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-03T17-03-59.314428.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-03T17-03-59.314428.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-03T17-03-59.314428.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-10-03T17-03-59.314428.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-03T17-03-59.314428.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-03T17-03-59.314428.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-03T17-03-59.314428.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-03T17-03-59.314428.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-10-03T17-03-59.314428.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-03T17-03-59.314428.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-03T17-03-59.314428.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-03T17-03-59.314428.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-03T17-03-59.314428.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-10-03T17-03-59.314428.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-10-03T17-03-59.314428.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-10-03T17-03-59.314428.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-10-03T17-03-59.314428.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-03T17-03-59.314428.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-10-03T17-03-59.314428.parquet", "**/details_harness|hendrycksTest-management|5_2023-10-03T17-03-59.314428.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-10-03T17-03-59.314428.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-10-03T17-03-59.314428.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-10-03T17-03-59.314428.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-10-03T17-03-59.314428.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-03T17-03-59.314428.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-10-03T17-03-59.314428.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-10-03T17-03-59.314428.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-10-03T17-03-59.314428.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-10-03T17-03-59.314428.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-10-03T17-03-59.314428.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-10-03T17-03-59.314428.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-10-03T17-03-59.314428.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-10-03T17-03-59.314428.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-10-03T17-03-59.314428.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-10-03T17-03-59.314428.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-03T17-03-59.314428.parquet", 
"**/details_harness|hendrycksTest-virology|5_2023-10-03T17-03-59.314428.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-10-03T17-03-59.314428.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_10_03T17_03_59.314428", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-03T17-03-59.314428.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-03T17-03-59.314428.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_10_03T17_03_59.314428", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-10-03T17-03-59.314428.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-10-03T17-03-59.314428.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_10_03T17_03_59.314428", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-10-03T17-03-59.314428.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-10-03T17-03-59.314428.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_10_03T17_03_59.314428", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-10-03T17-03-59.314428.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-10-03T17-03-59.314428.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_10_03T17_03_59.314428", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-03T17-03-59.314428.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-03T17-03-59.314428.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_10_03T17_03_59.314428", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-10-03T17-03-59.314428.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-10-03T17-03-59.314428.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_10_03T17_03_59.314428", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-10-03T17-03-59.314428.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-10-03T17-03-59.314428.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_10_03T17_03_59.314428", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-10-03T17-03-59.314428.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-10-03T17-03-59.314428.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_10_03T17_03_59.314428", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-10-03T17-03-59.314428.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-10-03T17-03-59.314428.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_10_03T17_03_59.314428", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-10-03T17-03-59.314428.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-10-03T17-03-59.314428.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_10_03T17_03_59.314428", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-10-03T17-03-59.314428.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-10-03T17-03-59.314428.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_10_03T17_03_59.314428", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-10-03T17-03-59.314428.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-10-03T17-03-59.314428.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_10_03T17_03_59.314428", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-03T17-03-59.314428.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-03T17-03-59.314428.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_10_03T17_03_59.314428", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-10-03T17-03-59.314428.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-10-03T17-03-59.314428.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_10_03T17_03_59.314428", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-03T17-03-59.314428.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-03T17-03-59.314428.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_10_03T17_03_59.314428", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-03T17-03-59.314428.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-03T17-03-59.314428.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_10_03T17_03_59.314428", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-10-03T17-03-59.314428.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-10-03T17-03-59.314428.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_10_03T17_03_59.314428", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-10-03T17-03-59.314428.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-10-03T17-03-59.314428.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_10_03T17_03_59.314428", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-10-03T17-03-59.314428.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-10-03T17-03-59.314428.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_10_03T17_03_59.314428", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-03T17-03-59.314428.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-03T17-03-59.314428.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_10_03T17_03_59.314428", "path": 
["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-03T17-03-59.314428.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-03T17-03-59.314428.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_10_03T17_03_59.314428", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-03T17-03-59.314428.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-03T17-03-59.314428.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_10_03T17_03_59.314428", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-10-03T17-03-59.314428.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-10-03T17-03-59.314428.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_10_03T17_03_59.314428", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-03T17-03-59.314428.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-03T17-03-59.314428.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_10_03T17_03_59.314428", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-03T17-03-59.314428.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-03T17-03-59.314428.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_10_03T17_03_59.314428", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-03T17-03-59.314428.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-03T17-03-59.314428.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_10_03T17_03_59.314428", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-03T17-03-59.314428.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-03T17-03-59.314428.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_10_03T17_03_59.314428", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-10-03T17-03-59.314428.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-10-03T17-03-59.314428.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_10_03T17_03_59.314428", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-03T17-03-59.314428.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-03T17-03-59.314428.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_10_03T17_03_59.314428", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-03T17-03-59.314428.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-03T17-03-59.314428.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_10_03T17_03_59.314428", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-03T17-03-59.314428.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-03T17-03-59.314428.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_10_03T17_03_59.314428", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-03T17-03-59.314428.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-03T17-03-59.314428.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_10_03T17_03_59.314428", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-10-03T17-03-59.314428.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-10-03T17-03-59.314428.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_10_03T17_03_59.314428", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-10-03T17-03-59.314428.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-10-03T17-03-59.314428.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_10_03T17_03_59.314428", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-10-03T17-03-59.314428.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-10-03T17-03-59.314428.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_10_03T17_03_59.314428", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-10-03T17-03-59.314428.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-10-03T17-03-59.314428.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_10_03T17_03_59.314428", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-03T17-03-59.314428.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-03T17-03-59.314428.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_10_03T17_03_59.314428", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-10-03T17-03-59.314428.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-10-03T17-03-59.314428.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_10_03T17_03_59.314428", "path": ["**/details_harness|hendrycksTest-management|5_2023-10-03T17-03-59.314428.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-10-03T17-03-59.314428.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_10_03T17_03_59.314428", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-10-03T17-03-59.314428.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-10-03T17-03-59.314428.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_10_03T17_03_59.314428", "path": 
["**/details_harness|hendrycksTest-medical_genetics|5_2023-10-03T17-03-59.314428.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-10-03T17-03-59.314428.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_10_03T17_03_59.314428", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-10-03T17-03-59.314428.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-10-03T17-03-59.314428.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_10_03T17_03_59.314428", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-10-03T17-03-59.314428.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-10-03T17-03-59.314428.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_10_03T17_03_59.314428", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-03T17-03-59.314428.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-03T17-03-59.314428.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_10_03T17_03_59.314428", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-10-03T17-03-59.314428.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-10-03T17-03-59.314428.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_10_03T17_03_59.314428", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-10-03T17-03-59.314428.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-10-03T17-03-59.314428.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_10_03T17_03_59.314428", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-10-03T17-03-59.314428.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-10-03T17-03-59.314428.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_10_03T17_03_59.314428", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-10-03T17-03-59.314428.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-10-03T17-03-59.314428.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_10_03T17_03_59.314428", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-10-03T17-03-59.314428.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-10-03T17-03-59.314428.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_10_03T17_03_59.314428", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-10-03T17-03-59.314428.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-10-03T17-03-59.314428.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_10_03T17_03_59.314428", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-10-03T17-03-59.314428.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_psychology|5_2023-10-03T17-03-59.314428.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_10_03T17_03_59.314428", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-10-03T17-03-59.314428.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-10-03T17-03-59.314428.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_10_03T17_03_59.314428", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-10-03T17-03-59.314428.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-10-03T17-03-59.314428.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_10_03T17_03_59.314428", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-10-03T17-03-59.314428.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-10-03T17-03-59.314428.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_10_03T17_03_59.314428", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-03T17-03-59.314428.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-03T17-03-59.314428.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_10_03T17_03_59.314428", "path": ["**/details_harness|hendrycksTest-virology|5_2023-10-03T17-03-59.314428.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-10-03T17-03-59.314428.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_10_03T17_03_59.314428", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-10-03T17-03-59.314428.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-10-03T17-03-59.314428.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_10_03T17_03_59.314428", "path": ["**/details_harness|truthfulqa:mc|0_2023-10-03T17-03-59.314428.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-10-03T17-03-59.314428.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_10_23T13_20_24.503429", "path": ["**/details_harness|winogrande|5_2023-10-23T13-20-24.503429.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-10-23T13-20-24.503429.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_10_03T17_03_59.314428", "path": ["results_2023-10-03T17-03-59.314428.parquet"]}, {"split": "2023_10_23T13_20_24.503429", "path": ["results_2023-10-23T13-20-24.503429.parquet"]}, {"split": "latest", "path": ["results_2023-10-23T13-20-24.503429.parquet"]}]}]}
2023-10-23T12:20:36+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of dhmeltzer/Llama-2-13b-hf-ds_wiki_1024_full_r_64_alpha_16_merged

## Dataset Description

- Homepage:
- Repository: URL
- Paper:
- Leaderboard: URL
- Point of Contact: clementine@URL

### Dataset Summary

Dataset automatically created during the evaluation run of model dhmeltzer/Llama-2-13b-hf-ds_wiki_1024_full_r_64_alpha_16_merged on the Open LLM Leaderboard.

The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 2 runs. Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).

To load the details from a run, you can for instance do the following:

## Latest results

These are the latest results from run 2023-10-23T13:20:24.503429 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks; you can find each in the results and the "latest" split for each eval):

### Supported Tasks and Leaderboards

### Languages

## Dataset Structure

### Data Instances

### Data Fields

### Data Splits

## Dataset Creation

### Curation Rationale

### Source Data

#### Initial Data Collection and Normalization

#### Who are the source language producers?

### Annotations

#### Annotation process

#### Who are the annotators?

### Personal and Sensitive Information

## Considerations for Using the Data

### Social Impact of Dataset

### Discussion of Biases

### Other Known Limitations

## Additional Information

### Dataset Curators

### Licensing Information

### Contributions
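The load snippet referenced in the card above ("To load the details from a run...") was stripped from this processed copy. A minimal sketch of what it would look like, assuming the dataset repository follows the leaderboard's `details_<org>__<model>` naming convention and using the `harness_winogrande_5` configuration listed in this record's metadata:

```python
from datasets import load_dataset

# Load the per-example details for one task configuration of this evaluation
# run. The repository name below is inferred from the leaderboard's naming
# convention, not quoted from this processed card.
data = load_dataset(
    "open-llm-leaderboard/details_dhmeltzer__Llama-2-13b-hf-ds_wiki_1024_full_r_64_alpha_16_merged",
    "harness_winogrande_5",
    split="train",
)
```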
[ "# Dataset Card for Evaluation run of dhmeltzer/Llama-2-13b-hf-ds_wiki_1024_full_r_64_alpha_16_merged", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model dhmeltzer/Llama-2-13b-hf-ds_wiki_1024_full_r_64_alpha_16_merged on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-23T13:20:24.503429(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of dhmeltzer/Llama-2-13b-hf-ds_wiki_1024_full_r_64_alpha_16_merged", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model dhmeltzer/Llama-2-13b-hf-ds_wiki_1024_full_r_64_alpha_16_merged on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-23T13:20:24.503429(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 42, 31, 190, 67, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of dhmeltzer/Llama-2-13b-hf-ds_wiki_1024_full_r_64_alpha_16_merged## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model dhmeltzer/Llama-2-13b-hf-ds_wiki_1024_full_r_64_alpha_16_merged on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-10-23T13:20:24.503429(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
03ff0248cae8dc15d1cd80539fdb1ebe2a4b9f53
# Dataset Card for Evaluation run of dhmeltzer/Llama-2-13b-hf-ds_eli5_1024_r_64_alpha_16_merged

## Dataset Description

- **Homepage:**
- **Repository:** https://huggingface.co/dhmeltzer/Llama-2-13b-hf-ds_eli5_1024_r_64_alpha_16_merged
- **Paper:**
- **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
- **Point of Contact:** [email protected]

### Dataset Summary

Dataset automatically created during the evaluation run of model [dhmeltzer/Llama-2-13b-hf-ds_eli5_1024_r_64_alpha_16_merged](https://huggingface.co/dhmeltzer/Llama-2-13b-hf-ds_eli5_1024_r_64_alpha_16_merged) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).

The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 2 runs. Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).

To load the details from a run, you can for instance do the following:

```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_dhmeltzer__Llama-2-13b-hf-ds_eli5_1024_r_64_alpha_16_merged",
    "harness_winogrande_5",
    split="train")
```

## Latest results

These are the [latest results from run 2023-10-28T17:04:10.334813](https://huggingface.co/datasets/open-llm-leaderboard/details_dhmeltzer__Llama-2-13b-hf-ds_eli5_1024_r_64_alpha_16_merged/blob/main/results_2023-10-28T17-04-10.334813.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks; you can find each in the results and the "latest" split for each eval):

```python
{
    "all": {
        "em": 0.022651006711409395,
        "em_stderr": 0.001523730780343799,
        "f1": 0.09599203020134199,
        "f1_stderr": 0.0020758446587035915,
        "acc": 0.42256576668005047,
        "acc_stderr": 0.00972711232409914
    },
    "harness|drop|3": {
        "em": 0.022651006711409395,
        "em_stderr": 0.001523730780343799,
        "f1": 0.09599203020134199,
        "f1_stderr": 0.0020758446587035915
    },
    "harness|gsm8k|5": {
        "acc": 0.08112206216830932,
        "acc_stderr": 0.007520395797922653
    },
    "harness|winogrande|5": {
        "acc": 0.7640094711917916,
        "acc_stderr": 0.011933828850275625
    }
}
```

### Supported Tasks and Leaderboards

[More Information Needed]

### Languages

[More Information Needed]

## Dataset Structure

### Data Instances

[More Information Needed]

### Data Fields

[More Information Needed]

### Data Splits

[More Information Needed]

## Dataset Creation

### Curation Rationale

[More Information Needed]

### Source Data

#### Initial Data Collection and Normalization

[More Information Needed]

#### Who are the source language producers?

[More Information Needed]

### Annotations

#### Annotation process

[More Information Needed]

#### Who are the annotators?

[More Information Needed]

### Personal and Sensitive Information

[More Information Needed]

## Considerations for Using the Data

### Social Impact of Dataset

[More Information Needed]

### Discussion of Biases

[More Information Needed]

### Other Known Limitations

[More Information Needed]

## Additional Information

### Dataset Curators

[More Information Needed]

### Licensing Information

[More Information Needed]

### Citation Information

[More Information Needed]

### Contributions

[More Information Needed]
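Since the card describes 64 per-task configurations, a quick way to enumerate them is via the Hub metadata. A small sketch (requires network access to the Hugging Face Hub; the helper is part of the `datasets` library):

```python
from datasets import get_dataset_config_names

# List the per-task configurations described in the card above.
configs = get_dataset_config_names(
    "open-llm-leaderboard/details_dhmeltzer__Llama-2-13b-hf-ds_eli5_1024_r_64_alpha_16_merged"
)
print(len(configs))   # expected to report the 64 configurations
print(configs[:5])    # e.g. the first few harness_* config names
```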
open-llm-leaderboard/details_dhmeltzer__Llama-2-13b-hf-ds_eli5_1024_r_64_alpha_16_merged
[ "region:us" ]
2023-10-03T16:10:58+00:00
{"pretty_name": "Evaluation run of dhmeltzer/Llama-2-13b-hf-ds_eli5_1024_r_64_alpha_16_merged", "dataset_summary": "Dataset automatically created during the evaluation run of model [dhmeltzer/Llama-2-13b-hf-ds_eli5_1024_r_64_alpha_16_merged](https://huggingface.co/dhmeltzer/Llama-2-13b-hf-ds_eli5_1024_r_64_alpha_16_merged) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_dhmeltzer__Llama-2-13b-hf-ds_eli5_1024_r_64_alpha_16_merged\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-10-28T17:04:10.334813](https://huggingface.co/datasets/open-llm-leaderboard/details_dhmeltzer__Llama-2-13b-hf-ds_eli5_1024_r_64_alpha_16_merged/blob/main/results_2023-10-28T17-04-10.334813.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.022651006711409395,\n \"em_stderr\": 0.001523730780343799,\n \"f1\": 0.09599203020134199,\n \"f1_stderr\": 0.0020758446587035915,\n \"acc\": 0.42256576668005047,\n \"acc_stderr\": 0.00972711232409914\n },\n \"harness|drop|3\": {\n \"em\": 0.022651006711409395,\n \"em_stderr\": 0.001523730780343799,\n \"f1\": 0.09599203020134199,\n \"f1_stderr\": 0.0020758446587035915\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.08112206216830932,\n \"acc_stderr\": 0.007520395797922653\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7640094711917916,\n \"acc_stderr\": 0.011933828850275625\n }\n}\n```", "repo_url": "https://huggingface.co/dhmeltzer/Llama-2-13b-hf-ds_eli5_1024_r_64_alpha_16_merged", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_10_03T17_10_34.313268", "path": ["**/details_harness|arc:challenge|25_2023-10-03T17-10-34.313268.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-10-03T17-10-34.313268.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_10_28T17_04_10.334813", "path": ["**/details_harness|drop|3_2023-10-28T17-04-10.334813.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-10-28T17-04-10.334813.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_10_28T17_04_10.334813", "path": ["**/details_harness|gsm8k|5_2023-10-28T17-04-10.334813.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-10-28T17-04-10.334813.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_10_03T17_10_34.313268", "path": 
["**/details_harness|hellaswag|10_2023-10-03T17-10-34.313268.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-10-03T17-10-34.313268.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_10_03T17_10_34.313268", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-03T17-10-34.313268.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-10-03T17-10-34.313268.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-10-03T17-10-34.313268.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-10-03T17-10-34.313268.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-03T17-10-34.313268.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-10-03T17-10-34.313268.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-10-03T17-10-34.313268.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-10-03T17-10-34.313268.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-10-03T17-10-34.313268.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-10-03T17-10-34.313268.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-10-03T17-10-34.313268.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-10-03T17-10-34.313268.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-03T17-10-34.313268.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-10-03T17-10-34.313268.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-03T17-10-34.313268.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-03T17-10-34.313268.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-10-03T17-10-34.313268.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-10-03T17-10-34.313268.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-10-03T17-10-34.313268.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-03T17-10-34.313268.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-03T17-10-34.313268.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-03T17-10-34.313268.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-10-03T17-10-34.313268.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-03T17-10-34.313268.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-03T17-10-34.313268.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-03T17-10-34.313268.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-03T17-10-34.313268.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-10-03T17-10-34.313268.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-03T17-10-34.313268.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-03T17-10-34.313268.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-03T17-10-34.313268.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-03T17-10-34.313268.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-10-03T17-10-34.313268.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-10-03T17-10-34.313268.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-10-03T17-10-34.313268.parquet", 
"**/details_harness|hendrycksTest-jurisprudence|5_2023-10-03T17-10-34.313268.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-03T17-10-34.313268.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-10-03T17-10-34.313268.parquet", "**/details_harness|hendrycksTest-management|5_2023-10-03T17-10-34.313268.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-10-03T17-10-34.313268.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-10-03T17-10-34.313268.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-10-03T17-10-34.313268.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-10-03T17-10-34.313268.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-03T17-10-34.313268.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-10-03T17-10-34.313268.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-10-03T17-10-34.313268.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-10-03T17-10-34.313268.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-10-03T17-10-34.313268.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-10-03T17-10-34.313268.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-10-03T17-10-34.313268.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-10-03T17-10-34.313268.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-10-03T17-10-34.313268.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-10-03T17-10-34.313268.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-10-03T17-10-34.313268.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-03T17-10-34.313268.parquet", "**/details_harness|hendrycksTest-virology|5_2023-10-03T17-10-34.313268.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-10-03T17-10-34.313268.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-03T17-10-34.313268.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-10-03T17-10-34.313268.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-10-03T17-10-34.313268.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-10-03T17-10-34.313268.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-03T17-10-34.313268.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-10-03T17-10-34.313268.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-10-03T17-10-34.313268.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-10-03T17-10-34.313268.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-10-03T17-10-34.313268.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-10-03T17-10-34.313268.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-10-03T17-10-34.313268.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-10-03T17-10-34.313268.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-03T17-10-34.313268.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-10-03T17-10-34.313268.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-03T17-10-34.313268.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-03T17-10-34.313268.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-10-03T17-10-34.313268.parquet", 
"**/details_harness|hendrycksTest-global_facts|5_2023-10-03T17-10-34.313268.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-10-03T17-10-34.313268.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-03T17-10-34.313268.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-03T17-10-34.313268.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-03T17-10-34.313268.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-10-03T17-10-34.313268.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-03T17-10-34.313268.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-03T17-10-34.313268.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-03T17-10-34.313268.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-03T17-10-34.313268.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-10-03T17-10-34.313268.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-03T17-10-34.313268.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-03T17-10-34.313268.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-03T17-10-34.313268.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-03T17-10-34.313268.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-10-03T17-10-34.313268.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-10-03T17-10-34.313268.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-10-03T17-10-34.313268.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-10-03T17-10-34.313268.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-03T17-10-34.313268.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-10-03T17-10-34.313268.parquet", "**/details_harness|hendrycksTest-management|5_2023-10-03T17-10-34.313268.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-10-03T17-10-34.313268.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-10-03T17-10-34.313268.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-10-03T17-10-34.313268.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-10-03T17-10-34.313268.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-03T17-10-34.313268.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-10-03T17-10-34.313268.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-10-03T17-10-34.313268.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-10-03T17-10-34.313268.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-10-03T17-10-34.313268.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-10-03T17-10-34.313268.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-10-03T17-10-34.313268.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-10-03T17-10-34.313268.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-10-03T17-10-34.313268.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-10-03T17-10-34.313268.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-10-03T17-10-34.313268.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-03T17-10-34.313268.parquet", 
"**/details_harness|hendrycksTest-virology|5_2023-10-03T17-10-34.313268.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-10-03T17-10-34.313268.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_10_03T17_10_34.313268", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-03T17-10-34.313268.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-03T17-10-34.313268.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_10_03T17_10_34.313268", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-10-03T17-10-34.313268.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-10-03T17-10-34.313268.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_10_03T17_10_34.313268", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-10-03T17-10-34.313268.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-10-03T17-10-34.313268.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_10_03T17_10_34.313268", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-10-03T17-10-34.313268.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-10-03T17-10-34.313268.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_10_03T17_10_34.313268", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-03T17-10-34.313268.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-03T17-10-34.313268.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_10_03T17_10_34.313268", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-10-03T17-10-34.313268.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-10-03T17-10-34.313268.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_10_03T17_10_34.313268", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-10-03T17-10-34.313268.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-10-03T17-10-34.313268.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_10_03T17_10_34.313268", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-10-03T17-10-34.313268.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-10-03T17-10-34.313268.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_10_03T17_10_34.313268", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-10-03T17-10-34.313268.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-10-03T17-10-34.313268.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_10_03T17_10_34.313268", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-10-03T17-10-34.313268.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-10-03T17-10-34.313268.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_10_03T17_10_34.313268", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-10-03T17-10-34.313268.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-10-03T17-10-34.313268.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_10_03T17_10_34.313268", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-10-03T17-10-34.313268.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-10-03T17-10-34.313268.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_10_03T17_10_34.313268", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-03T17-10-34.313268.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-03T17-10-34.313268.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_10_03T17_10_34.313268", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-10-03T17-10-34.313268.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-10-03T17-10-34.313268.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_10_03T17_10_34.313268", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-03T17-10-34.313268.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-03T17-10-34.313268.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_10_03T17_10_34.313268", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-03T17-10-34.313268.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-03T17-10-34.313268.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_10_03T17_10_34.313268", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-10-03T17-10-34.313268.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-10-03T17-10-34.313268.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_10_03T17_10_34.313268", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-10-03T17-10-34.313268.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-10-03T17-10-34.313268.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_10_03T17_10_34.313268", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-10-03T17-10-34.313268.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-10-03T17-10-34.313268.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_10_03T17_10_34.313268", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-03T17-10-34.313268.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-03T17-10-34.313268.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_10_03T17_10_34.313268", "path": 
["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-03T17-10-34.313268.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-03T17-10-34.313268.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_10_03T17_10_34.313268", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-03T17-10-34.313268.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-03T17-10-34.313268.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_10_03T17_10_34.313268", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-10-03T17-10-34.313268.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-10-03T17-10-34.313268.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_10_03T17_10_34.313268", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-03T17-10-34.313268.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-03T17-10-34.313268.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_10_03T17_10_34.313268", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-03T17-10-34.313268.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-03T17-10-34.313268.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_10_03T17_10_34.313268", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-03T17-10-34.313268.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-03T17-10-34.313268.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_10_03T17_10_34.313268", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-03T17-10-34.313268.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-03T17-10-34.313268.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_10_03T17_10_34.313268", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-10-03T17-10-34.313268.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-10-03T17-10-34.313268.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_10_03T17_10_34.313268", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-03T17-10-34.313268.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-03T17-10-34.313268.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_10_03T17_10_34.313268", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-03T17-10-34.313268.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-03T17-10-34.313268.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_10_03T17_10_34.313268", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-03T17-10-34.313268.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-03T17-10-34.313268.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_10_03T17_10_34.313268", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-03T17-10-34.313268.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-03T17-10-34.313268.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_10_03T17_10_34.313268", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-10-03T17-10-34.313268.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-10-03T17-10-34.313268.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_10_03T17_10_34.313268", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-10-03T17-10-34.313268.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-10-03T17-10-34.313268.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_10_03T17_10_34.313268", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-10-03T17-10-34.313268.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-10-03T17-10-34.313268.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_10_03T17_10_34.313268", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-10-03T17-10-34.313268.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-10-03T17-10-34.313268.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_10_03T17_10_34.313268", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-03T17-10-34.313268.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-03T17-10-34.313268.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_10_03T17_10_34.313268", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-10-03T17-10-34.313268.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-10-03T17-10-34.313268.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_10_03T17_10_34.313268", "path": ["**/details_harness|hendrycksTest-management|5_2023-10-03T17-10-34.313268.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-10-03T17-10-34.313268.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_10_03T17_10_34.313268", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-10-03T17-10-34.313268.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-10-03T17-10-34.313268.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_10_03T17_10_34.313268", "path": 
["**/details_harness|hendrycksTest-medical_genetics|5_2023-10-03T17-10-34.313268.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-10-03T17-10-34.313268.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_10_03T17_10_34.313268", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-10-03T17-10-34.313268.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-10-03T17-10-34.313268.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_10_03T17_10_34.313268", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-10-03T17-10-34.313268.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-10-03T17-10-34.313268.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_10_03T17_10_34.313268", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-03T17-10-34.313268.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-03T17-10-34.313268.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_10_03T17_10_34.313268", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-10-03T17-10-34.313268.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-10-03T17-10-34.313268.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_10_03T17_10_34.313268", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-10-03T17-10-34.313268.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-10-03T17-10-34.313268.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_10_03T17_10_34.313268", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-10-03T17-10-34.313268.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-10-03T17-10-34.313268.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_10_03T17_10_34.313268", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-10-03T17-10-34.313268.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-10-03T17-10-34.313268.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_10_03T17_10_34.313268", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-10-03T17-10-34.313268.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-10-03T17-10-34.313268.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_10_03T17_10_34.313268", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-10-03T17-10-34.313268.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-10-03T17-10-34.313268.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_10_03T17_10_34.313268", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-10-03T17-10-34.313268.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_psychology|5_2023-10-03T17-10-34.313268.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_10_03T17_10_34.313268", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-10-03T17-10-34.313268.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-10-03T17-10-34.313268.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_10_03T17_10_34.313268", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-10-03T17-10-34.313268.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-10-03T17-10-34.313268.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_10_03T17_10_34.313268", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-10-03T17-10-34.313268.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-10-03T17-10-34.313268.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_10_03T17_10_34.313268", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-03T17-10-34.313268.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-03T17-10-34.313268.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_10_03T17_10_34.313268", "path": ["**/details_harness|hendrycksTest-virology|5_2023-10-03T17-10-34.313268.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-10-03T17-10-34.313268.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_10_03T17_10_34.313268", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-10-03T17-10-34.313268.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-10-03T17-10-34.313268.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_10_03T17_10_34.313268", "path": ["**/details_harness|truthfulqa:mc|0_2023-10-03T17-10-34.313268.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-10-03T17-10-34.313268.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_10_28T17_04_10.334813", "path": ["**/details_harness|winogrande|5_2023-10-28T17-04-10.334813.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-10-28T17-04-10.334813.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_10_03T17_10_34.313268", "path": ["results_2023-10-03T17-10-34.313268.parquet"]}, {"split": "2023_10_28T17_04_10.334813", "path": ["results_2023-10-28T17-04-10.334813.parquet"]}, {"split": "latest", "path": ["results_2023-10-28T17-04-10.334813.parquet"]}]}]}
2023-10-28T16:04:22+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of dhmeltzer/Llama-2-13b-hf-ds_eli5_1024_r_64_alpha_16_merged

## Dataset Description

- Homepage:
- Repository: URL
- Paper:
- Leaderboard: URL
- Point of Contact: clementine@URL

### Dataset Summary

Dataset automatically created during the evaluation run of model dhmeltzer/Llama-2-13b-hf-ds_eli5_1024_r_64_alpha_16_merged on the Open LLM Leaderboard.

The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 2 runs. Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).

To load the details from a run, you can for instance do the following:

## Latest results

These are the latest results from run 2023-10-28T17:04:10.334813 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks; you can find each in the results and the "latest" split for each eval):

### Supported Tasks and Leaderboards

### Languages

## Dataset Structure

### Data Instances

### Data Fields

### Data Splits

## Dataset Creation

### Curation Rationale

### Source Data

#### Initial Data Collection and Normalization

#### Who are the source language producers?

### Annotations

#### Annotation process

#### Who are the annotators?

### Personal and Sensitive Information

## Considerations for Using the Data

### Social Impact of Dataset

### Discussion of Biases

### Other Known Limitations

## Additional Information

### Dataset Curators

### Licensing Information

### Contributions
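As in the full card earlier in this record, the load snippet was stripped from this processed copy. A minimal sketch, using the dataset id given in this record and the `harness_winogrande_5` configuration from its metadata:

```python
from datasets import load_dataset

# Sketch of the stripped load snippet; the dataset id comes from this record,
# the config name from its metadata.
data = load_dataset(
    "open-llm-leaderboard/details_dhmeltzer__Llama-2-13b-hf-ds_eli5_1024_r_64_alpha_16_merged",
    "harness_winogrande_5",
    split="train",
)
```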
[ "# Dataset Card for Evaluation run of dhmeltzer/Llama-2-13b-hf-ds_eli5_1024_r_64_alpha_16_merged", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model dhmeltzer/Llama-2-13b-hf-ds_eli5_1024_r_64_alpha_16_merged on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-28T17:04:10.334813(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of dhmeltzer/Llama-2-13b-hf-ds_eli5_1024_r_64_alpha_16_merged", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model dhmeltzer/Llama-2-13b-hf-ds_eli5_1024_r_64_alpha_16_merged on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-28T17:04:10.334813(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 41, 31, 189, 67, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of dhmeltzer/Llama-2-13b-hf-ds_eli5_1024_r_64_alpha_16_merged## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model dhmeltzer/Llama-2-13b-hf-ds_eli5_1024_r_64_alpha_16_merged on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-10-28T17:04:10.334813(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
343f126c0e54b166661e9d7a0a70e2a88ce3c501
# Dataset Card for Evaluation run of breadlicker45/dough-base-001

## Dataset Description

- **Homepage:** 
- **Repository:** https://huggingface.co/breadlicker45/dough-base-001
- **Paper:** 
- **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
- **Point of Contact:** [email protected]

### Dataset Summary

Dataset automatically created during the evaluation run of model [breadlicker45/dough-base-001](https://huggingface.co/breadlicker45/dough-base-001) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).

The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 3 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).

To load the details from a run, you can, for instance, do the following:
```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_breadlicker45__dough-base-001",
	"harness_winogrande_5",
	split="train")
```

## Latest results

These are the [latest results from run 2023-10-29T02:32:49.723963](https://huggingface.co/datasets/open-llm-leaderboard/details_breadlicker45__dough-base-001/blob/main/results_2023-10-29T02-32-49.723963.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks; you can find each in the results and the "latest" split for each eval):

```python
{
    "all": {
        "em": 0.0,
        "em_stderr": 0.0,
        "f1": 0.0029163171140939564,
        "f1_stderr": 0.00019355490209304062,
        "acc": 0.255327545382794,
        "acc_stderr": 0.007024647268145198
    },
    "harness|drop|3": {
        "em": 0.0,
        "em_stderr": 0.0,
        "f1": 0.0029163171140939564,
        "f1_stderr": 0.00019355490209304062
    },
    "harness|gsm8k|5": {
        "acc": 0.0,
        "acc_stderr": 0.0
    },
    "harness|winogrande|5": {
        "acc": 0.510655090765588,
        "acc_stderr": 0.014049294536290396
    }
}
```

### Supported Tasks and Leaderboards

[More Information Needed]

### Languages

[More Information Needed]

## Dataset Structure

### Data Instances

[More Information Needed]

### Data Fields

[More Information Needed]

### Data Splits

[More Information Needed]

## Dataset Creation

### Curation Rationale

[More Information Needed]

### Source Data

#### Initial Data Collection and Normalization

[More Information Needed]

#### Who are the source language producers?

[More Information Needed]

### Annotations

#### Annotation process

[More Information Needed]

#### Who are the annotators?

[More Information Needed]

### Personal and Sensitive Information

[More Information Needed]

## Considerations for Using the Data

### Social Impact of Dataset

[More Information Needed]

### Discussion of Biases

[More Information Needed]

### Other Known Limitations

[More Information Needed]

## Additional Information

### Dataset Curators

[More Information Needed]

### Licensing Information

[More Information Needed]

### Citation Information

[More Information Needed]

### Contributions

[More Information Needed]
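As a small illustrative addition (not part of the auto-generated card): the aggregated "results" configuration mentioned above can be loaded the same way as the per-task details. A minimal sketch, assuming the `datasets` library is installed; the config and split names are taken from the `configs` list in this record's metadata:

```python
from datasets import load_dataset

# Aggregated per-run metrics live in the "results" config; the "latest"
# split points at the most recent evaluation run (here 2023-10-29T02:32:49).
results = load_dataset(
    "open-llm-leaderboard/details_breadlicker45__dough-base-001",
    "results",
    split="latest",
)
print(results[0])  # inspect the aggregated metrics row
```

Timestamped splits such as "2023_10_29T02_00_15.543056" can be passed to `split=` instead of "latest" to retrieve an earlier run.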
open-llm-leaderboard/details_breadlicker45__dough-base-001
[ "region:us" ]
2023-10-03T16:12:45+00:00
{"pretty_name": "Evaluation run of breadlicker45/dough-base-001", "dataset_summary": "Dataset automatically created during the evaluation run of model [breadlicker45/dough-base-001](https://huggingface.co/breadlicker45/dough-base-001) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 3 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_breadlicker45__dough-base-001\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-10-29T02:32:49.723963](https://huggingface.co/datasets/open-llm-leaderboard/details_breadlicker45__dough-base-001/blob/main/results_2023-10-29T02-32-49.723963.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.0,\n \"em_stderr\": 0.0,\n \"f1\": 0.0029163171140939564,\n \"f1_stderr\": 0.00019355490209304062,\n \"acc\": 0.255327545382794,\n \"acc_stderr\": 0.007024647268145198\n },\n \"harness|drop|3\": {\n \"em\": 0.0,\n \"em_stderr\": 0.0,\n \"f1\": 0.0029163171140939564,\n \"f1_stderr\": 0.00019355490209304062\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.0,\n \"acc_stderr\": 0.0\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.510655090765588,\n \"acc_stderr\": 0.014049294536290396\n }\n}\n```", "repo_url": "https://huggingface.co/breadlicker45/dough-base-001", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_10_03T17_12_28.280269", "path": ["**/details_harness|arc:challenge|25_2023-10-03T17-12-28.280269.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-10-03T17-12-28.280269.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_10_29T02_00_15.543056", "path": ["**/details_harness|drop|3_2023-10-29T02-00-15.543056.parquet"]}, {"split": "2023_10_29T02_32_49.723963", "path": ["**/details_harness|drop|3_2023-10-29T02-32-49.723963.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-10-29T02-32-49.723963.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_10_29T02_00_15.543056", "path": ["**/details_harness|gsm8k|5_2023-10-29T02-00-15.543056.parquet"]}, {"split": "2023_10_29T02_32_49.723963", "path": ["**/details_harness|gsm8k|5_2023-10-29T02-32-49.723963.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-10-29T02-32-49.723963.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_10_03T17_12_28.280269", "path": ["**/details_harness|hellaswag|10_2023-10-03T17-12-28.280269.parquet"]}, {"split": 
"latest", "path": ["**/details_harness|hellaswag|10_2023-10-03T17-12-28.280269.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_10_03T17_12_28.280269", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-03T17-12-28.280269.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-10-03T17-12-28.280269.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-10-03T17-12-28.280269.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-10-03T17-12-28.280269.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-03T17-12-28.280269.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-10-03T17-12-28.280269.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-10-03T17-12-28.280269.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-10-03T17-12-28.280269.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-10-03T17-12-28.280269.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-10-03T17-12-28.280269.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-10-03T17-12-28.280269.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-10-03T17-12-28.280269.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-03T17-12-28.280269.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-10-03T17-12-28.280269.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-03T17-12-28.280269.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-03T17-12-28.280269.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-10-03T17-12-28.280269.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-10-03T17-12-28.280269.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-10-03T17-12-28.280269.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-03T17-12-28.280269.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-03T17-12-28.280269.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-03T17-12-28.280269.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-10-03T17-12-28.280269.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-03T17-12-28.280269.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-03T17-12-28.280269.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-03T17-12-28.280269.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-03T17-12-28.280269.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-10-03T17-12-28.280269.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-03T17-12-28.280269.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-03T17-12-28.280269.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-03T17-12-28.280269.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-03T17-12-28.280269.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-10-03T17-12-28.280269.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-10-03T17-12-28.280269.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-10-03T17-12-28.280269.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-10-03T17-12-28.280269.parquet", 
"**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-03T17-12-28.280269.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-10-03T17-12-28.280269.parquet", "**/details_harness|hendrycksTest-management|5_2023-10-03T17-12-28.280269.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-10-03T17-12-28.280269.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-10-03T17-12-28.280269.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-10-03T17-12-28.280269.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-10-03T17-12-28.280269.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-03T17-12-28.280269.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-10-03T17-12-28.280269.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-10-03T17-12-28.280269.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-10-03T17-12-28.280269.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-10-03T17-12-28.280269.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-10-03T17-12-28.280269.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-10-03T17-12-28.280269.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-10-03T17-12-28.280269.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-10-03T17-12-28.280269.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-10-03T17-12-28.280269.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-10-03T17-12-28.280269.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-03T17-12-28.280269.parquet", "**/details_harness|hendrycksTest-virology|5_2023-10-03T17-12-28.280269.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-10-03T17-12-28.280269.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-03T17-12-28.280269.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-10-03T17-12-28.280269.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-10-03T17-12-28.280269.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-10-03T17-12-28.280269.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-03T17-12-28.280269.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-10-03T17-12-28.280269.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-10-03T17-12-28.280269.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-10-03T17-12-28.280269.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-10-03T17-12-28.280269.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-10-03T17-12-28.280269.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-10-03T17-12-28.280269.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-10-03T17-12-28.280269.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-03T17-12-28.280269.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-10-03T17-12-28.280269.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-03T17-12-28.280269.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-03T17-12-28.280269.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-10-03T17-12-28.280269.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-10-03T17-12-28.280269.parquet", 
"**/details_harness|hendrycksTest-high_school_biology|5_2023-10-03T17-12-28.280269.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-03T17-12-28.280269.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-03T17-12-28.280269.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-03T17-12-28.280269.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-10-03T17-12-28.280269.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-03T17-12-28.280269.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-03T17-12-28.280269.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-03T17-12-28.280269.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-03T17-12-28.280269.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-10-03T17-12-28.280269.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-03T17-12-28.280269.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-03T17-12-28.280269.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-03T17-12-28.280269.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-03T17-12-28.280269.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-10-03T17-12-28.280269.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-10-03T17-12-28.280269.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-10-03T17-12-28.280269.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-10-03T17-12-28.280269.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-03T17-12-28.280269.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-10-03T17-12-28.280269.parquet", "**/details_harness|hendrycksTest-management|5_2023-10-03T17-12-28.280269.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-10-03T17-12-28.280269.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-10-03T17-12-28.280269.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-10-03T17-12-28.280269.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-10-03T17-12-28.280269.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-03T17-12-28.280269.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-10-03T17-12-28.280269.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-10-03T17-12-28.280269.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-10-03T17-12-28.280269.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-10-03T17-12-28.280269.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-10-03T17-12-28.280269.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-10-03T17-12-28.280269.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-10-03T17-12-28.280269.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-10-03T17-12-28.280269.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-10-03T17-12-28.280269.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-10-03T17-12-28.280269.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-03T17-12-28.280269.parquet", "**/details_harness|hendrycksTest-virology|5_2023-10-03T17-12-28.280269.parquet", 
"**/details_harness|hendrycksTest-world_religions|5_2023-10-03T17-12-28.280269.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_10_03T17_12_28.280269", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-03T17-12-28.280269.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-03T17-12-28.280269.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_10_03T17_12_28.280269", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-10-03T17-12-28.280269.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-10-03T17-12-28.280269.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_10_03T17_12_28.280269", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-10-03T17-12-28.280269.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-10-03T17-12-28.280269.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_10_03T17_12_28.280269", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-10-03T17-12-28.280269.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-10-03T17-12-28.280269.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_10_03T17_12_28.280269", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-03T17-12-28.280269.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-03T17-12-28.280269.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_10_03T17_12_28.280269", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-10-03T17-12-28.280269.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-10-03T17-12-28.280269.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_10_03T17_12_28.280269", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-10-03T17-12-28.280269.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-10-03T17-12-28.280269.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_10_03T17_12_28.280269", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-10-03T17-12-28.280269.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-10-03T17-12-28.280269.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_10_03T17_12_28.280269", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-10-03T17-12-28.280269.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-10-03T17-12-28.280269.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_10_03T17_12_28.280269", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-10-03T17-12-28.280269.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-10-03T17-12-28.280269.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": 
"2023_10_03T17_12_28.280269", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-10-03T17-12-28.280269.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-10-03T17-12-28.280269.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_10_03T17_12_28.280269", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-10-03T17-12-28.280269.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-10-03T17-12-28.280269.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_10_03T17_12_28.280269", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-03T17-12-28.280269.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-03T17-12-28.280269.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_10_03T17_12_28.280269", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-10-03T17-12-28.280269.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-10-03T17-12-28.280269.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_10_03T17_12_28.280269", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-03T17-12-28.280269.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-03T17-12-28.280269.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_10_03T17_12_28.280269", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-03T17-12-28.280269.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-03T17-12-28.280269.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_10_03T17_12_28.280269", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-10-03T17-12-28.280269.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-10-03T17-12-28.280269.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_10_03T17_12_28.280269", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-10-03T17-12-28.280269.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-10-03T17-12-28.280269.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_10_03T17_12_28.280269", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-10-03T17-12-28.280269.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-10-03T17-12-28.280269.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_10_03T17_12_28.280269", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-03T17-12-28.280269.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-03T17-12-28.280269.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_10_03T17_12_28.280269", "path": 
["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-03T17-12-28.280269.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-03T17-12-28.280269.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_10_03T17_12_28.280269", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-03T17-12-28.280269.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-03T17-12-28.280269.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_10_03T17_12_28.280269", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-10-03T17-12-28.280269.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-10-03T17-12-28.280269.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_10_03T17_12_28.280269", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-03T17-12-28.280269.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-03T17-12-28.280269.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_10_03T17_12_28.280269", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-03T17-12-28.280269.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-03T17-12-28.280269.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_10_03T17_12_28.280269", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-03T17-12-28.280269.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-03T17-12-28.280269.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_10_03T17_12_28.280269", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-03T17-12-28.280269.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-03T17-12-28.280269.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_10_03T17_12_28.280269", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-10-03T17-12-28.280269.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-10-03T17-12-28.280269.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_10_03T17_12_28.280269", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-03T17-12-28.280269.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-03T17-12-28.280269.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_10_03T17_12_28.280269", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-03T17-12-28.280269.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-03T17-12-28.280269.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_10_03T17_12_28.280269", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-03T17-12-28.280269.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-03T17-12-28.280269.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_10_03T17_12_28.280269", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-03T17-12-28.280269.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-03T17-12-28.280269.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_10_03T17_12_28.280269", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-10-03T17-12-28.280269.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-10-03T17-12-28.280269.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_10_03T17_12_28.280269", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-10-03T17-12-28.280269.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-10-03T17-12-28.280269.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_10_03T17_12_28.280269", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-10-03T17-12-28.280269.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-10-03T17-12-28.280269.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_10_03T17_12_28.280269", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-10-03T17-12-28.280269.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-10-03T17-12-28.280269.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_10_03T17_12_28.280269", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-03T17-12-28.280269.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-03T17-12-28.280269.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_10_03T17_12_28.280269", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-10-03T17-12-28.280269.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-10-03T17-12-28.280269.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_10_03T17_12_28.280269", "path": ["**/details_harness|hendrycksTest-management|5_2023-10-03T17-12-28.280269.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-10-03T17-12-28.280269.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_10_03T17_12_28.280269", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-10-03T17-12-28.280269.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-10-03T17-12-28.280269.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_10_03T17_12_28.280269", "path": 
["**/details_harness|hendrycksTest-medical_genetics|5_2023-10-03T17-12-28.280269.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-10-03T17-12-28.280269.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_10_03T17_12_28.280269", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-10-03T17-12-28.280269.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-10-03T17-12-28.280269.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_10_03T17_12_28.280269", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-10-03T17-12-28.280269.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-10-03T17-12-28.280269.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_10_03T17_12_28.280269", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-03T17-12-28.280269.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-03T17-12-28.280269.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_10_03T17_12_28.280269", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-10-03T17-12-28.280269.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-10-03T17-12-28.280269.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_10_03T17_12_28.280269", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-10-03T17-12-28.280269.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-10-03T17-12-28.280269.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_10_03T17_12_28.280269", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-10-03T17-12-28.280269.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-10-03T17-12-28.280269.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_10_03T17_12_28.280269", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-10-03T17-12-28.280269.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-10-03T17-12-28.280269.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_10_03T17_12_28.280269", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-10-03T17-12-28.280269.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-10-03T17-12-28.280269.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_10_03T17_12_28.280269", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-10-03T17-12-28.280269.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-10-03T17-12-28.280269.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_10_03T17_12_28.280269", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-10-03T17-12-28.280269.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_psychology|5_2023-10-03T17-12-28.280269.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_10_03T17_12_28.280269", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-10-03T17-12-28.280269.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-10-03T17-12-28.280269.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_10_03T17_12_28.280269", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-10-03T17-12-28.280269.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-10-03T17-12-28.280269.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_10_03T17_12_28.280269", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-10-03T17-12-28.280269.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-10-03T17-12-28.280269.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_10_03T17_12_28.280269", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-03T17-12-28.280269.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-03T17-12-28.280269.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_10_03T17_12_28.280269", "path": ["**/details_harness|hendrycksTest-virology|5_2023-10-03T17-12-28.280269.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-10-03T17-12-28.280269.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_10_03T17_12_28.280269", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-10-03T17-12-28.280269.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-10-03T17-12-28.280269.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_10_03T17_12_28.280269", "path": ["**/details_harness|truthfulqa:mc|0_2023-10-03T17-12-28.280269.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-10-03T17-12-28.280269.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_10_29T02_00_15.543056", "path": ["**/details_harness|winogrande|5_2023-10-29T02-00-15.543056.parquet"]}, {"split": "2023_10_29T02_32_49.723963", "path": ["**/details_harness|winogrande|5_2023-10-29T02-32-49.723963.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-10-29T02-32-49.723963.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_10_03T17_12_28.280269", "path": ["results_2023-10-03T17-12-28.280269.parquet"]}, {"split": "2023_10_29T02_00_15.543056", "path": ["results_2023-10-29T02-00-15.543056.parquet"]}, {"split": "2023_10_29T02_32_49.723963", "path": ["results_2023-10-29T02-32-49.723963.parquet"]}, {"split": "latest", "path": ["results_2023-10-29T02-32-49.723963.parquet"]}]}]}
2023-10-29T02:32:58+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of breadlicker45/dough-base-001 ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model breadlicker45/dough-base-001 on the Open LLM Leaderboard. The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 3 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can, for instance, do the following: ## Latest results These are the latest results from run 2023-10-29T02:32:49.723963 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks; you can find each in the results and the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
[ "# Dataset Card for Evaluation run of breadlicker45/dough-base-001", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model breadlicker45/dough-base-001 on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 3 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-29T02:32:49.723963(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of breadlicker45/dough-base-001", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model breadlicker45/dough-base-001 on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 3 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-29T02:32:49.723963(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 20, 31, 168, 67, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of breadlicker45/dough-base-001## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model breadlicker45/dough-base-001 on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 3 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-10-29T02:32:49.723963(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
14182793e313a172a74c96a546cb98288bbae9bd
# Dataset Card for Evaluation run of dhmeltzer/Llama-2-13b-hf-eli5-wiki-1024_r_64_alpha_16_merged

## Dataset Description

- **Homepage:** 
- **Repository:** https://huggingface.co/dhmeltzer/Llama-2-13b-hf-eli5-wiki-1024_r_64_alpha_16_merged
- **Paper:** 
- **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
- **Point of Contact:** [email protected]

### Dataset Summary

Dataset automatically created during the evaluation run of model [dhmeltzer/Llama-2-13b-hf-eli5-wiki-1024_r_64_alpha_16_merged](https://huggingface.co/dhmeltzer/Llama-2-13b-hf-eli5-wiki-1024_r_64_alpha_16_merged) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).

The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).

To load the details from a run, you can, for instance, do the following:
```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_dhmeltzer__Llama-2-13b-hf-eli5-wiki-1024_r_64_alpha_16_merged",
	"harness_winogrande_5",
	split="train")
```

## Latest results

These are the [latest results from run 2023-10-28T16:42:34.900797](https://huggingface.co/datasets/open-llm-leaderboard/details_dhmeltzer__Llama-2-13b-hf-eli5-wiki-1024_r_64_alpha_16_merged/blob/main/results_2023-10-28T16-42-34.900797.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks; you can find each in the results and the "latest" split for each eval):

```python
{
    "all": {
        "em": 0.0030411073825503355,
        "em_stderr": 0.0005638896908753095,
        "f1": 0.07054530201342267,
        "f1_stderr": 0.0015180367806482934,
        "acc": 0.42638763311757666,
        "acc_stderr": 0.00983841076151077
    },
    "harness|drop|3": {
        "em": 0.0030411073825503355,
        "em_stderr": 0.0005638896908753095,
        "f1": 0.07054530201342267,
        "f1_stderr": 0.0015180367806482934
    },
    "harness|gsm8k|5": {
        "acc": 0.08718726307808947,
        "acc_stderr": 0.007770691416783557
    },
    "harness|winogrande|5": {
        "acc": 0.7655880031570639,
        "acc_stderr": 0.011906130106237983
    }
}
```

### Supported Tasks and Leaderboards

[More Information Needed]

### Languages

[More Information Needed]

## Dataset Structure

### Data Instances

[More Information Needed]

### Data Fields

[More Information Needed]

### Data Splits

[More Information Needed]

## Dataset Creation

### Curation Rationale

[More Information Needed]

### Source Data

#### Initial Data Collection and Normalization

[More Information Needed]

#### Who are the source language producers?

[More Information Needed]

### Annotations

#### Annotation process

[More Information Needed]

#### Who are the annotators?

[More Information Needed]

### Personal and Sensitive Information

[More Information Needed]

## Considerations for Using the Data

### Social Impact of Dataset

[More Information Needed]

### Discussion of Biases

[More Information Needed]

### Other Known Limitations

[More Information Needed]

## Additional Information

### Dataset Curators

[More Information Needed]

### Licensing Information

[More Information Needed]

### Citation Information

[More Information Needed]

### Contributions

[More Information Needed]
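To complement the loading snippet above — an illustrative sketch rather than part of the generated card — the per-task detail configurations can be loaded the same way. The config and split names below are taken from the `configs` list in this record's metadata:

```python
from datasets import load_dataset

repo = "open-llm-leaderboard/details_dhmeltzer__Llama-2-13b-hf-eli5-wiki-1024_r_64_alpha_16_merged"

# Per-example details for the 5-shot GSM8K task; "latest" tracks the most recent run.
gsm8k = load_dataset(repo, "harness_gsm8k_5", split="latest")

# Each timestamped run is also exposed as its own split:
run = load_dataset(repo, "harness_gsm8k_5", split="2023_10_28T16_42_34.900797")

print(len(gsm8k), gsm8k.column_names)  # number of examples and detail fields
```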
open-llm-leaderboard/details_dhmeltzer__Llama-2-13b-hf-eli5-wiki-1024_r_64_alpha_16_merged
[ "region:us" ]
2023-10-03T16:17:09+00:00
{"pretty_name": "Evaluation run of dhmeltzer/Llama-2-13b-hf-eli5-wiki-1024_r_64_alpha_16_merged", "dataset_summary": "Dataset automatically created during the evaluation run of model [dhmeltzer/Llama-2-13b-hf-eli5-wiki-1024_r_64_alpha_16_merged](https://huggingface.co/dhmeltzer/Llama-2-13b-hf-eli5-wiki-1024_r_64_alpha_16_merged) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_dhmeltzer__Llama-2-13b-hf-eli5-wiki-1024_r_64_alpha_16_merged\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-10-28T16:42:34.900797](https://huggingface.co/datasets/open-llm-leaderboard/details_dhmeltzer__Llama-2-13b-hf-eli5-wiki-1024_r_64_alpha_16_merged/blob/main/results_2023-10-28T16-42-34.900797.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.0030411073825503355,\n \"em_stderr\": 0.0005638896908753095,\n \"f1\": 0.07054530201342267,\n \"f1_stderr\": 0.0015180367806482934,\n \"acc\": 0.42638763311757666,\n \"acc_stderr\": 0.00983841076151077\n },\n \"harness|drop|3\": {\n \"em\": 0.0030411073825503355,\n \"em_stderr\": 0.0005638896908753095,\n \"f1\": 0.07054530201342267,\n \"f1_stderr\": 0.0015180367806482934\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.08718726307808947,\n \"acc_stderr\": 0.007770691416783557\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7655880031570639,\n \"acc_stderr\": 0.011906130106237983\n }\n}\n```", "repo_url": "https://huggingface.co/dhmeltzer/Llama-2-13b-hf-eli5-wiki-1024_r_64_alpha_16_merged", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_10_03T17_16_44.707859", "path": ["**/details_harness|arc:challenge|25_2023-10-03T17-16-44.707859.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-10-03T17-16-44.707859.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_10_28T16_42_34.900797", "path": ["**/details_harness|drop|3_2023-10-28T16-42-34.900797.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-10-28T16-42-34.900797.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_10_28T16_42_34.900797", "path": ["**/details_harness|gsm8k|5_2023-10-28T16-42-34.900797.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-10-28T16-42-34.900797.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_10_03T17_16_44.707859", "path": 
["**/details_harness|hellaswag|10_2023-10-03T17-16-44.707859.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-10-03T17-16-44.707859.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_10_03T17_16_44.707859", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-03T17-16-44.707859.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-10-03T17-16-44.707859.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-10-03T17-16-44.707859.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-10-03T17-16-44.707859.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-03T17-16-44.707859.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-10-03T17-16-44.707859.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-10-03T17-16-44.707859.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-10-03T17-16-44.707859.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-10-03T17-16-44.707859.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-10-03T17-16-44.707859.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-10-03T17-16-44.707859.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-10-03T17-16-44.707859.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-03T17-16-44.707859.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-10-03T17-16-44.707859.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-03T17-16-44.707859.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-03T17-16-44.707859.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-10-03T17-16-44.707859.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-10-03T17-16-44.707859.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-10-03T17-16-44.707859.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-03T17-16-44.707859.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-03T17-16-44.707859.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-03T17-16-44.707859.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-10-03T17-16-44.707859.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-03T17-16-44.707859.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-03T17-16-44.707859.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-03T17-16-44.707859.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-03T17-16-44.707859.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-10-03T17-16-44.707859.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-03T17-16-44.707859.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-03T17-16-44.707859.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-03T17-16-44.707859.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-03T17-16-44.707859.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-10-03T17-16-44.707859.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-10-03T17-16-44.707859.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-10-03T17-16-44.707859.parquet", 
"**/details_harness|hendrycksTest-jurisprudence|5_2023-10-03T17-16-44.707859.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-03T17-16-44.707859.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-10-03T17-16-44.707859.parquet", "**/details_harness|hendrycksTest-management|5_2023-10-03T17-16-44.707859.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-10-03T17-16-44.707859.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-10-03T17-16-44.707859.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-10-03T17-16-44.707859.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-10-03T17-16-44.707859.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-03T17-16-44.707859.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-10-03T17-16-44.707859.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-10-03T17-16-44.707859.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-10-03T17-16-44.707859.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-10-03T17-16-44.707859.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-10-03T17-16-44.707859.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-10-03T17-16-44.707859.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-10-03T17-16-44.707859.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-10-03T17-16-44.707859.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-10-03T17-16-44.707859.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-10-03T17-16-44.707859.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-03T17-16-44.707859.parquet", "**/details_harness|hendrycksTest-virology|5_2023-10-03T17-16-44.707859.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-10-03T17-16-44.707859.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-03T17-16-44.707859.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-10-03T17-16-44.707859.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-10-03T17-16-44.707859.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-10-03T17-16-44.707859.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-03T17-16-44.707859.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-10-03T17-16-44.707859.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-10-03T17-16-44.707859.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-10-03T17-16-44.707859.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-10-03T17-16-44.707859.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-10-03T17-16-44.707859.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-10-03T17-16-44.707859.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-10-03T17-16-44.707859.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-03T17-16-44.707859.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-10-03T17-16-44.707859.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-03T17-16-44.707859.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-03T17-16-44.707859.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-10-03T17-16-44.707859.parquet", 
"**/details_harness|hendrycksTest-global_facts|5_2023-10-03T17-16-44.707859.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-10-03T17-16-44.707859.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-03T17-16-44.707859.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-03T17-16-44.707859.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-03T17-16-44.707859.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-10-03T17-16-44.707859.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-03T17-16-44.707859.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-03T17-16-44.707859.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-03T17-16-44.707859.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-03T17-16-44.707859.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-10-03T17-16-44.707859.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-03T17-16-44.707859.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-03T17-16-44.707859.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-03T17-16-44.707859.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-03T17-16-44.707859.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-10-03T17-16-44.707859.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-10-03T17-16-44.707859.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-10-03T17-16-44.707859.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-10-03T17-16-44.707859.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-03T17-16-44.707859.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-10-03T17-16-44.707859.parquet", "**/details_harness|hendrycksTest-management|5_2023-10-03T17-16-44.707859.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-10-03T17-16-44.707859.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-10-03T17-16-44.707859.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-10-03T17-16-44.707859.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-10-03T17-16-44.707859.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-03T17-16-44.707859.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-10-03T17-16-44.707859.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-10-03T17-16-44.707859.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-10-03T17-16-44.707859.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-10-03T17-16-44.707859.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-10-03T17-16-44.707859.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-10-03T17-16-44.707859.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-10-03T17-16-44.707859.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-10-03T17-16-44.707859.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-10-03T17-16-44.707859.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-10-03T17-16-44.707859.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-03T17-16-44.707859.parquet", 
"**/details_harness|hendrycksTest-virology|5_2023-10-03T17-16-44.707859.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-10-03T17-16-44.707859.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_10_03T17_16_44.707859", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-03T17-16-44.707859.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-03T17-16-44.707859.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_10_03T17_16_44.707859", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-10-03T17-16-44.707859.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-10-03T17-16-44.707859.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_10_03T17_16_44.707859", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-10-03T17-16-44.707859.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-10-03T17-16-44.707859.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_10_03T17_16_44.707859", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-10-03T17-16-44.707859.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-10-03T17-16-44.707859.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_10_03T17_16_44.707859", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-03T17-16-44.707859.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-03T17-16-44.707859.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_10_03T17_16_44.707859", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-10-03T17-16-44.707859.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-10-03T17-16-44.707859.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_10_03T17_16_44.707859", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-10-03T17-16-44.707859.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-10-03T17-16-44.707859.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_10_03T17_16_44.707859", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-10-03T17-16-44.707859.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-10-03T17-16-44.707859.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_10_03T17_16_44.707859", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-10-03T17-16-44.707859.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-10-03T17-16-44.707859.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_10_03T17_16_44.707859", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-10-03T17-16-44.707859.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-10-03T17-16-44.707859.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_10_03T17_16_44.707859", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-10-03T17-16-44.707859.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-10-03T17-16-44.707859.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_10_03T17_16_44.707859", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-10-03T17-16-44.707859.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-10-03T17-16-44.707859.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_10_03T17_16_44.707859", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-03T17-16-44.707859.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-03T17-16-44.707859.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_10_03T17_16_44.707859", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-10-03T17-16-44.707859.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-10-03T17-16-44.707859.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_10_03T17_16_44.707859", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-03T17-16-44.707859.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-03T17-16-44.707859.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_10_03T17_16_44.707859", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-03T17-16-44.707859.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-03T17-16-44.707859.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_10_03T17_16_44.707859", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-10-03T17-16-44.707859.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-10-03T17-16-44.707859.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_10_03T17_16_44.707859", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-10-03T17-16-44.707859.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-10-03T17-16-44.707859.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_10_03T17_16_44.707859", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-10-03T17-16-44.707859.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-10-03T17-16-44.707859.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_10_03T17_16_44.707859", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-03T17-16-44.707859.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-03T17-16-44.707859.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_10_03T17_16_44.707859", "path": 
["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-03T17-16-44.707859.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-03T17-16-44.707859.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_10_03T17_16_44.707859", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-03T17-16-44.707859.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-03T17-16-44.707859.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_10_03T17_16_44.707859", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-10-03T17-16-44.707859.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-10-03T17-16-44.707859.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_10_03T17_16_44.707859", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-03T17-16-44.707859.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-03T17-16-44.707859.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_10_03T17_16_44.707859", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-03T17-16-44.707859.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-03T17-16-44.707859.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_10_03T17_16_44.707859", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-03T17-16-44.707859.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-03T17-16-44.707859.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_10_03T17_16_44.707859", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-03T17-16-44.707859.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-03T17-16-44.707859.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_10_03T17_16_44.707859", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-10-03T17-16-44.707859.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-10-03T17-16-44.707859.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_10_03T17_16_44.707859", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-03T17-16-44.707859.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-03T17-16-44.707859.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_10_03T17_16_44.707859", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-03T17-16-44.707859.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-03T17-16-44.707859.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_10_03T17_16_44.707859", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-03T17-16-44.707859.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-03T17-16-44.707859.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_10_03T17_16_44.707859", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-03T17-16-44.707859.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-03T17-16-44.707859.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_10_03T17_16_44.707859", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-10-03T17-16-44.707859.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-10-03T17-16-44.707859.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_10_03T17_16_44.707859", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-10-03T17-16-44.707859.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-10-03T17-16-44.707859.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_10_03T17_16_44.707859", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-10-03T17-16-44.707859.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-10-03T17-16-44.707859.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_10_03T17_16_44.707859", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-10-03T17-16-44.707859.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-10-03T17-16-44.707859.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_10_03T17_16_44.707859", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-03T17-16-44.707859.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-03T17-16-44.707859.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_10_03T17_16_44.707859", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-10-03T17-16-44.707859.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-10-03T17-16-44.707859.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_10_03T17_16_44.707859", "path": ["**/details_harness|hendrycksTest-management|5_2023-10-03T17-16-44.707859.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-10-03T17-16-44.707859.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_10_03T17_16_44.707859", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-10-03T17-16-44.707859.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-10-03T17-16-44.707859.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_10_03T17_16_44.707859", "path": 
["**/details_harness|hendrycksTest-medical_genetics|5_2023-10-03T17-16-44.707859.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-10-03T17-16-44.707859.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_10_03T17_16_44.707859", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-10-03T17-16-44.707859.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-10-03T17-16-44.707859.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_10_03T17_16_44.707859", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-10-03T17-16-44.707859.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-10-03T17-16-44.707859.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_10_03T17_16_44.707859", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-03T17-16-44.707859.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-03T17-16-44.707859.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_10_03T17_16_44.707859", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-10-03T17-16-44.707859.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-10-03T17-16-44.707859.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_10_03T17_16_44.707859", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-10-03T17-16-44.707859.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-10-03T17-16-44.707859.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_10_03T17_16_44.707859", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-10-03T17-16-44.707859.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-10-03T17-16-44.707859.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_10_03T17_16_44.707859", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-10-03T17-16-44.707859.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-10-03T17-16-44.707859.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_10_03T17_16_44.707859", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-10-03T17-16-44.707859.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-10-03T17-16-44.707859.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_10_03T17_16_44.707859", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-10-03T17-16-44.707859.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-10-03T17-16-44.707859.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_10_03T17_16_44.707859", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-10-03T17-16-44.707859.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_psychology|5_2023-10-03T17-16-44.707859.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_10_03T17_16_44.707859", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-10-03T17-16-44.707859.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-10-03T17-16-44.707859.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_10_03T17_16_44.707859", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-10-03T17-16-44.707859.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-10-03T17-16-44.707859.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_10_03T17_16_44.707859", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-10-03T17-16-44.707859.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-10-03T17-16-44.707859.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_10_03T17_16_44.707859", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-03T17-16-44.707859.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-03T17-16-44.707859.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_10_03T17_16_44.707859", "path": ["**/details_harness|hendrycksTest-virology|5_2023-10-03T17-16-44.707859.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-10-03T17-16-44.707859.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_10_03T17_16_44.707859", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-10-03T17-16-44.707859.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-10-03T17-16-44.707859.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_10_03T17_16_44.707859", "path": ["**/details_harness|truthfulqa:mc|0_2023-10-03T17-16-44.707859.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-10-03T17-16-44.707859.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_10_28T16_42_34.900797", "path": ["**/details_harness|winogrande|5_2023-10-28T16-42-34.900797.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-10-28T16-42-34.900797.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_10_03T17_16_44.707859", "path": ["results_2023-10-03T17-16-44.707859.parquet"]}, {"split": "2023_10_28T16_42_34.900797", "path": ["results_2023-10-28T16-42-34.900797.parquet"]}, {"split": "latest", "path": ["results_2023-10-28T16-42-34.900797.parquet"]}]}]}
2023-10-28T15:42:47+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of dhmeltzer/Llama-2-13b-hf-eli5-wiki-1024_r_64_alpha_16_merged

## Dataset Description

- Homepage:
- Repository: URL
- Paper:
- Leaderboard: URL
- Point of Contact: clementine@URL

### Dataset Summary

Dataset automatically created during the evaluation run of model dhmeltzer/Llama-2-13b-hf-eli5-wiki-1024_r_64_alpha_16_merged on the Open LLM Leaderboard.

The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).

To load the details from a run, you can for instance do the following (see the loading example after this card):

## Latest results

These are the latest results from run 2023-10-28T16:42:34.900797 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks; you can find each in the results and the "latest" split for each eval):

### Supported Tasks and Leaderboards

### Languages

## Dataset Structure

### Data Instances

### Data Fields

### Data Splits

## Dataset Creation

### Curation Rationale

### Source Data

#### Initial Data Collection and Normalization

#### Who are the source language producers?

### Annotations

#### Annotation process

#### Who are the annotators?

### Personal and Sensitive Information

## Considerations for Using the Data

### Social Impact of Dataset

### Discussion of Biases

### Other Known Limitations

## Additional Information

### Dataset Curators

### Licensing Information

### Contributions
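The loading example referred to above, using the exact repository id, configuration name, and split recorded in this record's metadata:

```python
from datasets import load_dataset

# Per-example details for the 5-shot Winogrande run; "train" always points at the latest results.
data = load_dataset(
    "open-llm-leaderboard/details_dhmeltzer__Llama-2-13b-hf-eli5-wiki-1024_r_64_alpha_16_merged",
    "harness_winogrande_5",
    split="train",
)
```

Note that the top-level "acc" in the metadata's aggregated block (0.42638763311757666) is the unweighted mean of the gsm8k (0.08718726307808947) and winogrande (0.7655880031570639) accuracies.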
[ "# Dataset Card for Evaluation run of dhmeltzer/Llama-2-13b-hf-eli5-wiki-1024_r_64_alpha_16_merged", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model dhmeltzer/Llama-2-13b-hf-eli5-wiki-1024_r_64_alpha_16_merged on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-28T16:42:34.900797(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of dhmeltzer/Llama-2-13b-hf-eli5-wiki-1024_r_64_alpha_16_merged", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model dhmeltzer/Llama-2-13b-hf-eli5-wiki-1024_r_64_alpha_16_merged on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-28T16:42:34.900797(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 39, 31, 187, 67, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of dhmeltzer/Llama-2-13b-hf-eli5-wiki-1024_r_64_alpha_16_merged## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model dhmeltzer/Llama-2-13b-hf-eli5-wiki-1024_r_64_alpha_16_merged on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-10-28T16:42:34.900797(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
cb6e1acf74c2cbbb77ebd330bea47aa4f1d3efd0
# Dataset Card for Evaluation run of LTC-AI-Labs/L2-7b-Base-test-WVG

## Dataset Description

- **Homepage:**
- **Repository:** https://huggingface.co/LTC-AI-Labs/L2-7b-Base-test-WVG
- **Paper:**
- **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
- **Point of Contact:** [email protected]

### Dataset Summary

Dataset automatically created during the evaluation run of model [LTC-AI-Labs/L2-7b-Base-test-WVG](https://huggingface.co/LTC-AI-Labs/L2-7b-Base-test-WVG) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).

The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 3 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).

To load the details from a run, you can for instance do the following:

```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_LTC-AI-Labs__L2-7b-Base-test-WVG",
	"harness_winogrande_5",
	split="train")
```

## Latest results

These are the [latest results from run 2023-10-28T07:52:27.189086](https://huggingface.co/datasets/open-llm-leaderboard/details_LTC-AI-Labs__L2-7b-Base-test-WVG/blob/main/results_2023-10-28T07-52-27.189086.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks; you can find each in the results and the "latest" split for each eval):

```python
{
    "all": {
        "em": 0.003145973154362416,
        "em_stderr": 0.0005734993648436373,
        "f1": 0.07481229026845672,
        "f1_stderr": 0.0016422896702234556,
        "acc": 0.40267285313968093,
        "acc_stderr": 0.00970555723399882
    },
    "harness|drop|3": {
        "em": 0.003145973154362416,
        "em_stderr": 0.0005734993648436373,
        "f1": 0.07481229026845672,
        "f1_stderr": 0.0016422896702234556
    },
    "harness|gsm8k|5": {
        "acc": 0.06974981046247157,
        "acc_stderr": 0.007016389571013843
    },
    "harness|winogrande|5": {
        "acc": 0.7355958958168903,
        "acc_stderr": 0.012394724896983799
    }
}
```

### Supported Tasks and Leaderboards

[More Information Needed]

### Languages

[More Information Needed]

## Dataset Structure

### Data Instances

[More Information Needed]

### Data Fields

[More Information Needed]

### Data Splits

[More Information Needed]

## Dataset Creation

### Curation Rationale

[More Information Needed]

### Source Data

#### Initial Data Collection and Normalization

[More Information Needed]

#### Who are the source language producers?

[More Information Needed]

### Annotations

#### Annotation process

[More Information Needed]

#### Who are the annotators?

[More Information Needed]

### Personal and Sensitive Information

[More Information Needed]

## Considerations for Using the Data

### Social Impact of Dataset

[More Information Needed]

### Discussion of Biases

[More Information Needed]

### Other Known Limitations

[More Information Needed]

## Additional Information

### Dataset Curators

[More Information Needed]

### Licensing Information

[More Information Needed]

### Citation Information

[More Information Needed]

### Contributions

[More Information Needed]
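A short sketch of how the per-task and aggregated numbers above relate. The "results" config and its "latest" split are taken from this record's metadata below; network access to the Hugging Face Hub is assumed:

```python
from datasets import get_dataset_config_names, load_dataset

repo = "open-llm-leaderboard/details_LTC-AI-Labs__L2-7b-Base-test-WVG"

# One config per evaluated task, plus a "results" config holding the aggregates.
configs = get_dataset_config_names(repo)

# "latest" always resolves to the most recent run's aggregated results.
results = load_dataset(repo, "results", split="latest")

# The top-level "acc" above is the unweighted mean of the per-task accuracies:
acc_all = (0.06974981046247157 + 0.7355958958168903) / 2  # gsm8k, winogrande
assert abs(acc_all - 0.40267285313968093) < 1e-12
```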
open-llm-leaderboard/details_LTC-AI-Labs__L2-7b-Base-test-WVG
[ "region:us" ]
2023-10-03T16:19:32+00:00
{"pretty_name": "Evaluation run of LTC-AI-Labs/L2-7b-Base-test-WVG", "dataset_summary": "Dataset automatically created during the evaluation run of model [LTC-AI-Labs/L2-7b-Base-test-WVG](https://huggingface.co/LTC-AI-Labs/L2-7b-Base-test-WVG) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 3 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_LTC-AI-Labs__L2-7b-Base-test-WVG\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-10-28T07:52:27.189086](https://huggingface.co/datasets/open-llm-leaderboard/details_LTC-AI-Labs__L2-7b-Base-test-WVG/blob/main/results_2023-10-28T07-52-27.189086.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.003145973154362416,\n \"em_stderr\": 0.0005734993648436373,\n \"f1\": 0.07481229026845672,\n \"f1_stderr\": 0.0016422896702234556,\n \"acc\": 0.40267285313968093,\n \"acc_stderr\": 0.00970555723399882\n },\n \"harness|drop|3\": {\n \"em\": 0.003145973154362416,\n \"em_stderr\": 0.0005734993648436373,\n \"f1\": 0.07481229026845672,\n \"f1_stderr\": 0.0016422896702234556\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.06974981046247157,\n \"acc_stderr\": 0.007016389571013843\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7355958958168903,\n \"acc_stderr\": 0.012394724896983799\n }\n}\n```", "repo_url": "https://huggingface.co/LTC-AI-Labs/L2-7b-Base-test-WVG", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_10_03T17_19_09.186622", "path": ["**/details_harness|arc:challenge|25_2023-10-03T17-19-09.186622.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-10-03T17-19-09.186622.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_10_28T06_36_16.797528", "path": ["**/details_harness|drop|3_2023-10-28T06-36-16.797528.parquet"]}, {"split": "2023_10_28T07_52_27.189086", "path": ["**/details_harness|drop|3_2023-10-28T07-52-27.189086.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-10-28T07-52-27.189086.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_10_28T06_36_16.797528", "path": ["**/details_harness|gsm8k|5_2023-10-28T06-36-16.797528.parquet"]}, {"split": "2023_10_28T07_52_27.189086", "path": ["**/details_harness|gsm8k|5_2023-10-28T07-52-27.189086.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-10-28T07-52-27.189086.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": 
"2023_10_03T17_19_09.186622", "path": ["**/details_harness|hellaswag|10_2023-10-03T17-19-09.186622.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-10-03T17-19-09.186622.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_10_03T17_19_09.186622", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-03T17-19-09.186622.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-10-03T17-19-09.186622.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-10-03T17-19-09.186622.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-10-03T17-19-09.186622.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-03T17-19-09.186622.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-10-03T17-19-09.186622.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-10-03T17-19-09.186622.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-10-03T17-19-09.186622.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-10-03T17-19-09.186622.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-10-03T17-19-09.186622.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-10-03T17-19-09.186622.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-10-03T17-19-09.186622.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-03T17-19-09.186622.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-10-03T17-19-09.186622.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-03T17-19-09.186622.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-03T17-19-09.186622.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-10-03T17-19-09.186622.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-10-03T17-19-09.186622.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-10-03T17-19-09.186622.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-03T17-19-09.186622.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-03T17-19-09.186622.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-03T17-19-09.186622.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-10-03T17-19-09.186622.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-03T17-19-09.186622.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-03T17-19-09.186622.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-03T17-19-09.186622.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-03T17-19-09.186622.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-10-03T17-19-09.186622.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-03T17-19-09.186622.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-03T17-19-09.186622.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-03T17-19-09.186622.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-03T17-19-09.186622.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-10-03T17-19-09.186622.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-10-03T17-19-09.186622.parquet", 
"**/details_harness|hendrycksTest-international_law|5_2023-10-03T17-19-09.186622.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-10-03T17-19-09.186622.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-03T17-19-09.186622.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-10-03T17-19-09.186622.parquet", "**/details_harness|hendrycksTest-management|5_2023-10-03T17-19-09.186622.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-10-03T17-19-09.186622.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-10-03T17-19-09.186622.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-10-03T17-19-09.186622.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-10-03T17-19-09.186622.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-03T17-19-09.186622.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-10-03T17-19-09.186622.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-10-03T17-19-09.186622.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-10-03T17-19-09.186622.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-10-03T17-19-09.186622.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-10-03T17-19-09.186622.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-10-03T17-19-09.186622.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-10-03T17-19-09.186622.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-10-03T17-19-09.186622.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-10-03T17-19-09.186622.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-10-03T17-19-09.186622.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-03T17-19-09.186622.parquet", "**/details_harness|hendrycksTest-virology|5_2023-10-03T17-19-09.186622.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-10-03T17-19-09.186622.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-03T17-19-09.186622.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-10-03T17-19-09.186622.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-10-03T17-19-09.186622.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-10-03T17-19-09.186622.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-03T17-19-09.186622.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-10-03T17-19-09.186622.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-10-03T17-19-09.186622.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-10-03T17-19-09.186622.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-10-03T17-19-09.186622.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-10-03T17-19-09.186622.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-10-03T17-19-09.186622.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-10-03T17-19-09.186622.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-03T17-19-09.186622.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-10-03T17-19-09.186622.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-03T17-19-09.186622.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-03T17-19-09.186622.parquet", 
"**/details_harness|hendrycksTest-formal_logic|5_2023-10-03T17-19-09.186622.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-10-03T17-19-09.186622.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-10-03T17-19-09.186622.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-03T17-19-09.186622.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-03T17-19-09.186622.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-03T17-19-09.186622.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-10-03T17-19-09.186622.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-03T17-19-09.186622.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-03T17-19-09.186622.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-03T17-19-09.186622.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-03T17-19-09.186622.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-10-03T17-19-09.186622.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-03T17-19-09.186622.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-03T17-19-09.186622.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-03T17-19-09.186622.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-03T17-19-09.186622.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-10-03T17-19-09.186622.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-10-03T17-19-09.186622.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-10-03T17-19-09.186622.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-10-03T17-19-09.186622.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-03T17-19-09.186622.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-10-03T17-19-09.186622.parquet", "**/details_harness|hendrycksTest-management|5_2023-10-03T17-19-09.186622.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-10-03T17-19-09.186622.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-10-03T17-19-09.186622.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-10-03T17-19-09.186622.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-10-03T17-19-09.186622.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-03T17-19-09.186622.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-10-03T17-19-09.186622.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-10-03T17-19-09.186622.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-10-03T17-19-09.186622.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-10-03T17-19-09.186622.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-10-03T17-19-09.186622.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-10-03T17-19-09.186622.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-10-03T17-19-09.186622.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-10-03T17-19-09.186622.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-10-03T17-19-09.186622.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-10-03T17-19-09.186622.parquet", 
"**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-03T17-19-09.186622.parquet", "**/details_harness|hendrycksTest-virology|5_2023-10-03T17-19-09.186622.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-10-03T17-19-09.186622.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_10_03T17_19_09.186622", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-03T17-19-09.186622.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-03T17-19-09.186622.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_10_03T17_19_09.186622", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-10-03T17-19-09.186622.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-10-03T17-19-09.186622.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_10_03T17_19_09.186622", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-10-03T17-19-09.186622.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-10-03T17-19-09.186622.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_10_03T17_19_09.186622", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-10-03T17-19-09.186622.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-10-03T17-19-09.186622.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_10_03T17_19_09.186622", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-03T17-19-09.186622.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-03T17-19-09.186622.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_10_03T17_19_09.186622", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-10-03T17-19-09.186622.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-10-03T17-19-09.186622.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_10_03T17_19_09.186622", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-10-03T17-19-09.186622.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-10-03T17-19-09.186622.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_10_03T17_19_09.186622", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-10-03T17-19-09.186622.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-10-03T17-19-09.186622.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_10_03T17_19_09.186622", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-10-03T17-19-09.186622.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-10-03T17-19-09.186622.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_10_03T17_19_09.186622", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-10-03T17-19-09.186622.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-college_medicine|5_2023-10-03T17-19-09.186622.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_10_03T17_19_09.186622", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-10-03T17-19-09.186622.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-10-03T17-19-09.186622.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_10_03T17_19_09.186622", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-10-03T17-19-09.186622.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-10-03T17-19-09.186622.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_10_03T17_19_09.186622", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-03T17-19-09.186622.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-03T17-19-09.186622.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_10_03T17_19_09.186622", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-10-03T17-19-09.186622.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-10-03T17-19-09.186622.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_10_03T17_19_09.186622", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-03T17-19-09.186622.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-03T17-19-09.186622.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_10_03T17_19_09.186622", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-03T17-19-09.186622.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-03T17-19-09.186622.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_10_03T17_19_09.186622", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-10-03T17-19-09.186622.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-10-03T17-19-09.186622.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_10_03T17_19_09.186622", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-10-03T17-19-09.186622.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-10-03T17-19-09.186622.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_10_03T17_19_09.186622", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-10-03T17-19-09.186622.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-10-03T17-19-09.186622.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_10_03T17_19_09.186622", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-03T17-19-09.186622.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-03T17-19-09.186622.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_10_03T17_19_09.186622", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-03T17-19-09.186622.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-03T17-19-09.186622.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_10_03T17_19_09.186622", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-03T17-19-09.186622.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-03T17-19-09.186622.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_10_03T17_19_09.186622", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-10-03T17-19-09.186622.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-10-03T17-19-09.186622.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_10_03T17_19_09.186622", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-03T17-19-09.186622.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-03T17-19-09.186622.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_10_03T17_19_09.186622", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-03T17-19-09.186622.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-03T17-19-09.186622.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_10_03T17_19_09.186622", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-03T17-19-09.186622.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-03T17-19-09.186622.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_10_03T17_19_09.186622", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-03T17-19-09.186622.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-03T17-19-09.186622.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_10_03T17_19_09.186622", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-10-03T17-19-09.186622.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-10-03T17-19-09.186622.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_10_03T17_19_09.186622", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-03T17-19-09.186622.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-03T17-19-09.186622.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_10_03T17_19_09.186622", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-03T17-19-09.186622.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-03T17-19-09.186622.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_10_03T17_19_09.186622", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-03T17-19-09.186622.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-03T17-19-09.186622.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_10_03T17_19_09.186622", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-03T17-19-09.186622.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-03T17-19-09.186622.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_10_03T17_19_09.186622", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-10-03T17-19-09.186622.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-10-03T17-19-09.186622.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_10_03T17_19_09.186622", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-10-03T17-19-09.186622.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-10-03T17-19-09.186622.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_10_03T17_19_09.186622", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-10-03T17-19-09.186622.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-10-03T17-19-09.186622.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_10_03T17_19_09.186622", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-10-03T17-19-09.186622.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-10-03T17-19-09.186622.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_10_03T17_19_09.186622", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-03T17-19-09.186622.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-03T17-19-09.186622.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_10_03T17_19_09.186622", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-10-03T17-19-09.186622.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-10-03T17-19-09.186622.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_10_03T17_19_09.186622", "path": ["**/details_harness|hendrycksTest-management|5_2023-10-03T17-19-09.186622.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-10-03T17-19-09.186622.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_10_03T17_19_09.186622", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-10-03T17-19-09.186622.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-10-03T17-19-09.186622.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": 
[{"split": "2023_10_03T17_19_09.186622", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-10-03T17-19-09.186622.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-10-03T17-19-09.186622.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_10_03T17_19_09.186622", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-10-03T17-19-09.186622.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-10-03T17-19-09.186622.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_10_03T17_19_09.186622", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-10-03T17-19-09.186622.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-10-03T17-19-09.186622.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_10_03T17_19_09.186622", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-03T17-19-09.186622.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-03T17-19-09.186622.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_10_03T17_19_09.186622", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-10-03T17-19-09.186622.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-10-03T17-19-09.186622.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_10_03T17_19_09.186622", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-10-03T17-19-09.186622.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-10-03T17-19-09.186622.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_10_03T17_19_09.186622", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-10-03T17-19-09.186622.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-10-03T17-19-09.186622.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_10_03T17_19_09.186622", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-10-03T17-19-09.186622.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-10-03T17-19-09.186622.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_10_03T17_19_09.186622", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-10-03T17-19-09.186622.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-10-03T17-19-09.186622.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_10_03T17_19_09.186622", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-10-03T17-19-09.186622.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-10-03T17-19-09.186622.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_10_03T17_19_09.186622", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-10-03T17-19-09.186622.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_psychology|5_2023-10-03T17-19-09.186622.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_10_03T17_19_09.186622", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-10-03T17-19-09.186622.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-10-03T17-19-09.186622.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_10_03T17_19_09.186622", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-10-03T17-19-09.186622.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-10-03T17-19-09.186622.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_10_03T17_19_09.186622", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-10-03T17-19-09.186622.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-10-03T17-19-09.186622.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_10_03T17_19_09.186622", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-03T17-19-09.186622.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-03T17-19-09.186622.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_10_03T17_19_09.186622", "path": ["**/details_harness|hendrycksTest-virology|5_2023-10-03T17-19-09.186622.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-10-03T17-19-09.186622.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_10_03T17_19_09.186622", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-10-03T17-19-09.186622.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-10-03T17-19-09.186622.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_10_03T17_19_09.186622", "path": ["**/details_harness|truthfulqa:mc|0_2023-10-03T17-19-09.186622.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-10-03T17-19-09.186622.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_10_28T06_36_16.797528", "path": ["**/details_harness|winogrande|5_2023-10-28T06-36-16.797528.parquet"]}, {"split": "2023_10_28T07_52_27.189086", "path": ["**/details_harness|winogrande|5_2023-10-28T07-52-27.189086.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-10-28T07-52-27.189086.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_10_03T17_19_09.186622", "path": ["results_2023-10-03T17-19-09.186622.parquet"]}, {"split": "2023_10_28T06_36_16.797528", "path": ["results_2023-10-28T06-36-16.797528.parquet"]}, {"split": "2023_10_28T07_52_27.189086", "path": ["results_2023-10-28T07-52-27.189086.parquet"]}, {"split": "latest", "path": ["results_2023-10-28T07-52-27.189086.parquet"]}]}]}
2023-10-28T06:52:36+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of LTC-AI-Labs/L2-7b-Base-test-WVG ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model LTC-AI-Labs/L2-7b-Base-test-WVG on the Open LLM Leaderboard. The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 3 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-10-28T07:52:27.189086 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
[ "# Dataset Card for Evaluation run of LTC-AI-Labs/L2-7b-Base-test-WVG", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model LTC-AI-Labs/L2-7b-Base-test-WVG on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 3 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-28T07:52:27.189086(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of LTC-AI-Labs/L2-7b-Base-test-WVG", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model LTC-AI-Labs/L2-7b-Base-test-WVG on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 3 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-28T07:52:27.189086(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 29, 31, 177, 67, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of LTC-AI-Labs/L2-7b-Base-test-WVG## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model LTC-AI-Labs/L2-7b-Base-test-WVG on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 3 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-10-28T07:52:27.189086(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
5846345bc314b149dbd950baf509131af97ed5a6
# Dataset Card for "JimmyLu" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
bigheiniuJ/JimmyLu
[ "region:us" ]
2023-10-03T16:24:12+00:00
{"dataset_info": {"features": [{"name": "output", "dtype": "string"}, {"name": "input", "dtype": "string"}, {"name": "seed", "dtype": "string"}, {"name": "split", "dtype": "string"}, {"name": "task", "dtype": "string"}, {"name": "options", "sequence": "string"}, {"name": "id", "dtype": "int64"}], "splits": [{"name": "dev", "num_bytes": 772928, "num_examples": 3150}, {"name": "test", "num_bytes": 25323226, "num_examples": 87430}, {"name": "train", "num_bytes": 793610, "num_examples": 3150}], "download_size": 9003720, "dataset_size": 26889764}, "configs": [{"config_name": "default", "data_files": [{"split": "dev", "path": "data/dev-*"}, {"split": "test", "path": "data/test-*"}, {"split": "train", "path": "data/train-*"}]}]}
2023-10-11T01:09:38+00:00
[]
[]
TAGS #region-us
# Dataset Card for "JimmyLu" More Information needed
[ "# Dataset Card for \"JimmyLu\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"JimmyLu\"\n\nMore Information needed" ]
[ 6, 14 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"JimmyLu\"\n\nMore Information needed" ]
6b132866ccc27d065ba4eabfea08cdbaab24fc1c
# Dataset Card for Evaluation run of ajibawa-2023/Uncensored-Frank-33B ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/ajibawa-2023/Uncensored-Frank-33B - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [ajibawa-2023/Uncensored-Frank-33B](https://huggingface.co/ajibawa-2023/Uncensored-Frank-33B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_ajibawa-2023__Uncensored-Frank-33B", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-10-30T04:12:18.796375](https://huggingface.co/datasets/open-llm-leaderboard/details_ajibawa-2023__Uncensored-Frank-33B/blob/main/results_2023-10-30T04-12-18.796375.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "em": 0.17554530201342283, "em_stderr": 0.0038959884031644423, "f1": 0.2628104026845651, "f1_stderr": 0.003991015722513057, "acc": 0.4661905140880088, "acc_stderr": 0.01108732307443375 }, "harness|drop|3": { "em": 0.17554530201342283, "em_stderr": 0.0038959884031644423, "f1": 0.2628104026845651, "f1_stderr": 0.003991015722513057 }, "harness|gsm8k|5": { "acc": 0.16679302501895377, "acc_stderr": 0.010268516042629513 }, "harness|winogrande|5": { "acc": 0.7655880031570639, "acc_stderr": 0.011906130106237986 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
open-llm-leaderboard/details_ajibawa-2023__Uncensored-Frank-33B
[ "region:us" ]
2023-10-03T16:30:23+00:00
{"pretty_name": "Evaluation run of ajibawa-2023/Uncensored-Frank-33B", "dataset_summary": "Dataset automatically created during the evaluation run of model [ajibawa-2023/Uncensored-Frank-33B](https://huggingface.co/ajibawa-2023/Uncensored-Frank-33B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_ajibawa-2023__Uncensored-Frank-33B\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-10-30T04:12:18.796375](https://huggingface.co/datasets/open-llm-leaderboard/details_ajibawa-2023__Uncensored-Frank-33B/blob/main/results_2023-10-30T04-12-18.796375.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.17554530201342283,\n \"em_stderr\": 0.0038959884031644423,\n \"f1\": 0.2628104026845651,\n \"f1_stderr\": 0.003991015722513057,\n \"acc\": 0.4661905140880088,\n \"acc_stderr\": 0.01108732307443375\n },\n \"harness|drop|3\": {\n \"em\": 0.17554530201342283,\n \"em_stderr\": 0.0038959884031644423,\n \"f1\": 0.2628104026845651,\n \"f1_stderr\": 0.003991015722513057\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.16679302501895377,\n \"acc_stderr\": 0.010268516042629513\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7655880031570639,\n \"acc_stderr\": 0.011906130106237986\n }\n}\n```", "repo_url": "https://huggingface.co/ajibawa-2023/Uncensored-Frank-33B", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_10_03T17_30_05.303429", "path": ["**/details_harness|arc:challenge|25_2023-10-03T17-30-05.303429.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-10-03T17-30-05.303429.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_10_30T04_12_18.796375", "path": ["**/details_harness|drop|3_2023-10-30T04-12-18.796375.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-10-30T04-12-18.796375.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_10_30T04_12_18.796375", "path": ["**/details_harness|gsm8k|5_2023-10-30T04-12-18.796375.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-10-30T04-12-18.796375.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_10_03T17_30_05.303429", "path": ["**/details_harness|hellaswag|10_2023-10-03T17-30-05.303429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-10-03T17-30-05.303429.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_5", "data_files": [{"split": "2023_10_03T17_30_05.303429", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-03T17-30-05.303429.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-10-03T17-30-05.303429.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-10-03T17-30-05.303429.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-10-03T17-30-05.303429.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-03T17-30-05.303429.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-10-03T17-30-05.303429.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-10-03T17-30-05.303429.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-10-03T17-30-05.303429.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-10-03T17-30-05.303429.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-10-03T17-30-05.303429.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-10-03T17-30-05.303429.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-10-03T17-30-05.303429.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-03T17-30-05.303429.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-10-03T17-30-05.303429.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-03T17-30-05.303429.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-03T17-30-05.303429.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-10-03T17-30-05.303429.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-10-03T17-30-05.303429.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-10-03T17-30-05.303429.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-03T17-30-05.303429.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-03T17-30-05.303429.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-03T17-30-05.303429.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-10-03T17-30-05.303429.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-03T17-30-05.303429.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-03T17-30-05.303429.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-03T17-30-05.303429.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-03T17-30-05.303429.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-10-03T17-30-05.303429.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-03T17-30-05.303429.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-03T17-30-05.303429.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-03T17-30-05.303429.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-03T17-30-05.303429.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-10-03T17-30-05.303429.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-10-03T17-30-05.303429.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-10-03T17-30-05.303429.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-10-03T17-30-05.303429.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-03T17-30-05.303429.parquet", 
"**/details_harness|hendrycksTest-machine_learning|5_2023-10-03T17-30-05.303429.parquet", "**/details_harness|hendrycksTest-management|5_2023-10-03T17-30-05.303429.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-10-03T17-30-05.303429.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-10-03T17-30-05.303429.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-10-03T17-30-05.303429.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-10-03T17-30-05.303429.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-03T17-30-05.303429.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-10-03T17-30-05.303429.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-10-03T17-30-05.303429.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-10-03T17-30-05.303429.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-10-03T17-30-05.303429.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-10-03T17-30-05.303429.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-10-03T17-30-05.303429.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-10-03T17-30-05.303429.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-10-03T17-30-05.303429.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-10-03T17-30-05.303429.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-10-03T17-30-05.303429.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-03T17-30-05.303429.parquet", "**/details_harness|hendrycksTest-virology|5_2023-10-03T17-30-05.303429.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-10-03T17-30-05.303429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-03T17-30-05.303429.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-10-03T17-30-05.303429.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-10-03T17-30-05.303429.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-10-03T17-30-05.303429.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-03T17-30-05.303429.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-10-03T17-30-05.303429.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-10-03T17-30-05.303429.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-10-03T17-30-05.303429.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-10-03T17-30-05.303429.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-10-03T17-30-05.303429.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-10-03T17-30-05.303429.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-10-03T17-30-05.303429.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-03T17-30-05.303429.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-10-03T17-30-05.303429.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-03T17-30-05.303429.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-03T17-30-05.303429.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-10-03T17-30-05.303429.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-10-03T17-30-05.303429.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-10-03T17-30-05.303429.parquet", 
"**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-03T17-30-05.303429.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-03T17-30-05.303429.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-03T17-30-05.303429.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-10-03T17-30-05.303429.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-03T17-30-05.303429.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-03T17-30-05.303429.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-03T17-30-05.303429.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-03T17-30-05.303429.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-10-03T17-30-05.303429.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-03T17-30-05.303429.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-03T17-30-05.303429.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-03T17-30-05.303429.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-03T17-30-05.303429.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-10-03T17-30-05.303429.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-10-03T17-30-05.303429.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-10-03T17-30-05.303429.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-10-03T17-30-05.303429.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-03T17-30-05.303429.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-10-03T17-30-05.303429.parquet", "**/details_harness|hendrycksTest-management|5_2023-10-03T17-30-05.303429.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-10-03T17-30-05.303429.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-10-03T17-30-05.303429.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-10-03T17-30-05.303429.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-10-03T17-30-05.303429.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-03T17-30-05.303429.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-10-03T17-30-05.303429.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-10-03T17-30-05.303429.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-10-03T17-30-05.303429.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-10-03T17-30-05.303429.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-10-03T17-30-05.303429.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-10-03T17-30-05.303429.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-10-03T17-30-05.303429.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-10-03T17-30-05.303429.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-10-03T17-30-05.303429.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-10-03T17-30-05.303429.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-03T17-30-05.303429.parquet", "**/details_harness|hendrycksTest-virology|5_2023-10-03T17-30-05.303429.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-10-03T17-30-05.303429.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", 
"data_files": [{"split": "2023_10_03T17_30_05.303429", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-03T17-30-05.303429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-03T17-30-05.303429.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_10_03T17_30_05.303429", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-10-03T17-30-05.303429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-10-03T17-30-05.303429.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_10_03T17_30_05.303429", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-10-03T17-30-05.303429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-10-03T17-30-05.303429.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_10_03T17_30_05.303429", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-10-03T17-30-05.303429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-10-03T17-30-05.303429.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_10_03T17_30_05.303429", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-03T17-30-05.303429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-03T17-30-05.303429.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_10_03T17_30_05.303429", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-10-03T17-30-05.303429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-10-03T17-30-05.303429.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_10_03T17_30_05.303429", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-10-03T17-30-05.303429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-10-03T17-30-05.303429.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_10_03T17_30_05.303429", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-10-03T17-30-05.303429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-10-03T17-30-05.303429.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_10_03T17_30_05.303429", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-10-03T17-30-05.303429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-10-03T17-30-05.303429.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_10_03T17_30_05.303429", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-10-03T17-30-05.303429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-10-03T17-30-05.303429.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_10_03T17_30_05.303429", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-10-03T17-30-05.303429.parquet"]}, {"split": "latest", 
"path": ["**/details_harness|hendrycksTest-college_physics|5_2023-10-03T17-30-05.303429.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_10_03T17_30_05.303429", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-10-03T17-30-05.303429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-10-03T17-30-05.303429.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_10_03T17_30_05.303429", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-03T17-30-05.303429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-03T17-30-05.303429.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_10_03T17_30_05.303429", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-10-03T17-30-05.303429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-10-03T17-30-05.303429.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_10_03T17_30_05.303429", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-03T17-30-05.303429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-03T17-30-05.303429.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_10_03T17_30_05.303429", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-03T17-30-05.303429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-03T17-30-05.303429.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_10_03T17_30_05.303429", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-10-03T17-30-05.303429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-10-03T17-30-05.303429.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_10_03T17_30_05.303429", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-10-03T17-30-05.303429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-10-03T17-30-05.303429.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_10_03T17_30_05.303429", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-10-03T17-30-05.303429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-10-03T17-30-05.303429.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_10_03T17_30_05.303429", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-03T17-30-05.303429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-03T17-30-05.303429.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_10_03T17_30_05.303429", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-03T17-30-05.303429.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-03T17-30-05.303429.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_10_03T17_30_05.303429", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-03T17-30-05.303429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-03T17-30-05.303429.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_10_03T17_30_05.303429", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-10-03T17-30-05.303429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-10-03T17-30-05.303429.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_10_03T17_30_05.303429", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-03T17-30-05.303429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-03T17-30-05.303429.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_10_03T17_30_05.303429", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-03T17-30-05.303429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-03T17-30-05.303429.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_10_03T17_30_05.303429", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-03T17-30-05.303429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-03T17-30-05.303429.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_10_03T17_30_05.303429", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-03T17-30-05.303429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-03T17-30-05.303429.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_10_03T17_30_05.303429", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-10-03T17-30-05.303429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-10-03T17-30-05.303429.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_10_03T17_30_05.303429", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-03T17-30-05.303429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-03T17-30-05.303429.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_10_03T17_30_05.303429", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-03T17-30-05.303429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-03T17-30-05.303429.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_10_03T17_30_05.303429", "path": 
["**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-03T17-30-05.303429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-03T17-30-05.303429.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_10_03T17_30_05.303429", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-03T17-30-05.303429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-03T17-30-05.303429.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_10_03T17_30_05.303429", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-10-03T17-30-05.303429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-10-03T17-30-05.303429.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_10_03T17_30_05.303429", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-10-03T17-30-05.303429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-10-03T17-30-05.303429.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_10_03T17_30_05.303429", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-10-03T17-30-05.303429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-10-03T17-30-05.303429.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_10_03T17_30_05.303429", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-10-03T17-30-05.303429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-10-03T17-30-05.303429.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_10_03T17_30_05.303429", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-03T17-30-05.303429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-03T17-30-05.303429.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_10_03T17_30_05.303429", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-10-03T17-30-05.303429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-10-03T17-30-05.303429.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_10_03T17_30_05.303429", "path": ["**/details_harness|hendrycksTest-management|5_2023-10-03T17-30-05.303429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-10-03T17-30-05.303429.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_10_03T17_30_05.303429", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-10-03T17-30-05.303429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-10-03T17-30-05.303429.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_10_03T17_30_05.303429", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-10-03T17-30-05.303429.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-medical_genetics|5_2023-10-03T17-30-05.303429.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_10_03T17_30_05.303429", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-10-03T17-30-05.303429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-10-03T17-30-05.303429.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_10_03T17_30_05.303429", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-10-03T17-30-05.303429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-10-03T17-30-05.303429.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_10_03T17_30_05.303429", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-03T17-30-05.303429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-03T17-30-05.303429.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_10_03T17_30_05.303429", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-10-03T17-30-05.303429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-10-03T17-30-05.303429.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_10_03T17_30_05.303429", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-10-03T17-30-05.303429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-10-03T17-30-05.303429.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_10_03T17_30_05.303429", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-10-03T17-30-05.303429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-10-03T17-30-05.303429.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_10_03T17_30_05.303429", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-10-03T17-30-05.303429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-10-03T17-30-05.303429.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_10_03T17_30_05.303429", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-10-03T17-30-05.303429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-10-03T17-30-05.303429.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_10_03T17_30_05.303429", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-10-03T17-30-05.303429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-10-03T17-30-05.303429.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_10_03T17_30_05.303429", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-10-03T17-30-05.303429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-10-03T17-30-05.303429.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": 
"2023_10_03T17_30_05.303429", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-10-03T17-30-05.303429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-10-03T17-30-05.303429.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_10_03T17_30_05.303429", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-10-03T17-30-05.303429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-10-03T17-30-05.303429.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_10_03T17_30_05.303429", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-10-03T17-30-05.303429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-10-03T17-30-05.303429.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_10_03T17_30_05.303429", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-03T17-30-05.303429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-03T17-30-05.303429.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_10_03T17_30_05.303429", "path": ["**/details_harness|hendrycksTest-virology|5_2023-10-03T17-30-05.303429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-10-03T17-30-05.303429.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_10_03T17_30_05.303429", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-10-03T17-30-05.303429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-10-03T17-30-05.303429.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_10_03T17_30_05.303429", "path": ["**/details_harness|truthfulqa:mc|0_2023-10-03T17-30-05.303429.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-10-03T17-30-05.303429.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_10_30T04_12_18.796375", "path": ["**/details_harness|winogrande|5_2023-10-30T04-12-18.796375.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-10-30T04-12-18.796375.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_10_03T17_30_05.303429", "path": ["results_2023-10-03T17-30-05.303429.parquet"]}, {"split": "2023_10_30T04_12_18.796375", "path": ["results_2023-10-30T04-12-18.796375.parquet"]}, {"split": "latest", "path": ["results_2023-10-30T04-12-18.796375.parquet"]}]}]}
2023-10-30T04:12:31+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of ajibawa-2023/Uncensored-Frank-33B ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model ajibawa-2023/Uncensored-Frank-33B on the Open LLM Leaderboard. The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-10-30T04:12:18.796375 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
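The flattened card above says "To load the details from a run, you can for instance do the following:" but the snippet itself was stripped during processing. Below is a minimal sketch following the pattern of the other cards in this dump; the repo id `open-llm-leaderboard/details_ajibawa-2023__Uncensored-Frank-33B` is inferred from the `details_<org>__<model>` naming of the sibling cards and should be treated as an assumption, while the config name is taken from this record's metadata:

```python
from datasets import load_dataset

# Repo id inferred from the details_<org>__<model> pattern of sibling cards (assumption).
data = load_dataset("open-llm-leaderboard/details_ajibawa-2023__Uncensored-Frank-33B",
                    "harness_winogrande_5",  # config name listed in this record's metadata
                    split="train")           # "train" always points to the latest results
print(data)
```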
[ "# Dataset Card for Evaluation run of ajibawa-2023/Uncensored-Frank-33B", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model ajibawa-2023/Uncensored-Frank-33B on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-30T04:12:18.796375(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of ajibawa-2023/Uncensored-Frank-33B", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model ajibawa-2023/Uncensored-Frank-33B on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-30T04:12:18.796375(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 22, 31, 170, 67, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of ajibawa-2023/Uncensored-Frank-33B## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model ajibawa-2023/Uncensored-Frank-33B on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-10-30T04:12:18.796375(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
3e3f138c1739e8285cfb6df5ae409dc7f13b3b06
# Dataset Card for Evaluation run of Undi95/Emerald-13B ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/Undi95/Emerald-13B - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [Undi95/Emerald-13B](https://huggingface.co/Undi95/Emerald-13B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_Undi95__Emerald-13B", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-10-23T18:27:52.311274](https://huggingface.co/datasets/open-llm-leaderboard/details_Undi95__Emerald-13B/blob/main/results_2023-10-23T18-27-52.311274.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "em": 0.11566694630872483, "em_stderr": 0.0032753085227622833, "f1": 0.18378460570469723, "f1_stderr": 0.003376754461365903, "acc": 0.4437006222575401, "acc_stderr": 0.010610978881102105 }, "harness|drop|3": { "em": 0.11566694630872483, "em_stderr": 0.0032753085227622833, "f1": 0.18378460570469723, "f1_stderr": 0.003376754461365903 }, "harness|gsm8k|5": { "acc": 0.1281273692191054, "acc_stderr": 0.009206398549980031 }, "harness|winogrande|5": { "acc": 0.7592738752959748, "acc_stderr": 0.012015559212224176 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
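The "results" configuration and the "latest" split described in the card above can be combined to fetch just the aggregated metrics. A minimal sketch, assuming the `results` config and `latest` split names declared in this card's metadata are still current:

```python
from datasets import load_dataset

# Aggregated metrics for the most recent run; "results" and "latest" are the
# config/split names listed in this card's metadata.
results = load_dataset("open-llm-leaderboard/details_Undi95__Emerald-13B",
                       "results",
                       split="latest")

# One row per aggregated results record; list the available columns before digging in.
print(results.column_names)
```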
open-llm-leaderboard/details_Undi95__Emerald-13B
[ "region:us" ]
2023-10-03T16:31:47+00:00
{"pretty_name": "Evaluation run of Undi95/Emerald-13B", "dataset_summary": "Dataset automatically created during the evaluation run of model [Undi95/Emerald-13B](https://huggingface.co/Undi95/Emerald-13B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_Undi95__Emerald-13B\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-10-23T18:27:52.311274](https://huggingface.co/datasets/open-llm-leaderboard/details_Undi95__Emerald-13B/blob/main/results_2023-10-23T18-27-52.311274.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.11566694630872483,\n \"em_stderr\": 0.0032753085227622833,\n \"f1\": 0.18378460570469723,\n \"f1_stderr\": 0.003376754461365903,\n \"acc\": 0.4437006222575401,\n \"acc_stderr\": 0.010610978881102105\n },\n \"harness|drop|3\": {\n \"em\": 0.11566694630872483,\n \"em_stderr\": 0.0032753085227622833,\n \"f1\": 0.18378460570469723,\n \"f1_stderr\": 0.003376754461365903\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.1281273692191054,\n \"acc_stderr\": 0.009206398549980031\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7592738752959748,\n \"acc_stderr\": 0.012015559212224176\n }\n}\n```", "repo_url": "https://huggingface.co/Undi95/Emerald-13B", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_10_03T17_31_23.265550", "path": ["**/details_harness|arc:challenge|25_2023-10-03T17-31-23.265550.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-10-03T17-31-23.265550.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_10_23T18_27_52.311274", "path": ["**/details_harness|drop|3_2023-10-23T18-27-52.311274.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-10-23T18-27-52.311274.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_10_23T18_27_52.311274", "path": ["**/details_harness|gsm8k|5_2023-10-23T18-27-52.311274.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-10-23T18-27-52.311274.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_10_03T17_31_23.265550", "path": ["**/details_harness|hellaswag|10_2023-10-03T17-31-23.265550.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-10-03T17-31-23.265550.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_10_03T17_31_23.265550", "path": 
["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-03T17-31-23.265550.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-10-03T17-31-23.265550.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-10-03T17-31-23.265550.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-10-03T17-31-23.265550.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-03T17-31-23.265550.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-10-03T17-31-23.265550.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-10-03T17-31-23.265550.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-10-03T17-31-23.265550.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-10-03T17-31-23.265550.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-10-03T17-31-23.265550.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-10-03T17-31-23.265550.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-10-03T17-31-23.265550.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-03T17-31-23.265550.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-10-03T17-31-23.265550.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-03T17-31-23.265550.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-03T17-31-23.265550.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-10-03T17-31-23.265550.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-10-03T17-31-23.265550.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-10-03T17-31-23.265550.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-03T17-31-23.265550.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-03T17-31-23.265550.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-03T17-31-23.265550.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-10-03T17-31-23.265550.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-03T17-31-23.265550.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-03T17-31-23.265550.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-03T17-31-23.265550.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-03T17-31-23.265550.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-10-03T17-31-23.265550.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-03T17-31-23.265550.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-03T17-31-23.265550.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-03T17-31-23.265550.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-03T17-31-23.265550.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-10-03T17-31-23.265550.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-10-03T17-31-23.265550.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-10-03T17-31-23.265550.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-10-03T17-31-23.265550.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-03T17-31-23.265550.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-10-03T17-31-23.265550.parquet", 
"**/details_harness|hendrycksTest-management|5_2023-10-03T17-31-23.265550.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-10-03T17-31-23.265550.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-10-03T17-31-23.265550.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-10-03T17-31-23.265550.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-10-03T17-31-23.265550.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-03T17-31-23.265550.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-10-03T17-31-23.265550.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-10-03T17-31-23.265550.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-10-03T17-31-23.265550.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-10-03T17-31-23.265550.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-10-03T17-31-23.265550.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-10-03T17-31-23.265550.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-10-03T17-31-23.265550.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-10-03T17-31-23.265550.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-10-03T17-31-23.265550.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-10-03T17-31-23.265550.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-03T17-31-23.265550.parquet", "**/details_harness|hendrycksTest-virology|5_2023-10-03T17-31-23.265550.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-10-03T17-31-23.265550.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-03T17-31-23.265550.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-10-03T17-31-23.265550.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-10-03T17-31-23.265550.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-10-03T17-31-23.265550.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-03T17-31-23.265550.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-10-03T17-31-23.265550.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-10-03T17-31-23.265550.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-10-03T17-31-23.265550.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-10-03T17-31-23.265550.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-10-03T17-31-23.265550.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-10-03T17-31-23.265550.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-10-03T17-31-23.265550.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-03T17-31-23.265550.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-10-03T17-31-23.265550.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-03T17-31-23.265550.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-03T17-31-23.265550.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-10-03T17-31-23.265550.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-10-03T17-31-23.265550.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-10-03T17-31-23.265550.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-03T17-31-23.265550.parquet", 
"**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-03T17-31-23.265550.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-03T17-31-23.265550.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-10-03T17-31-23.265550.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-03T17-31-23.265550.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-03T17-31-23.265550.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-03T17-31-23.265550.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-03T17-31-23.265550.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-10-03T17-31-23.265550.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-03T17-31-23.265550.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-03T17-31-23.265550.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-03T17-31-23.265550.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-03T17-31-23.265550.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-10-03T17-31-23.265550.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-10-03T17-31-23.265550.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-10-03T17-31-23.265550.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-10-03T17-31-23.265550.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-03T17-31-23.265550.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-10-03T17-31-23.265550.parquet", "**/details_harness|hendrycksTest-management|5_2023-10-03T17-31-23.265550.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-10-03T17-31-23.265550.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-10-03T17-31-23.265550.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-10-03T17-31-23.265550.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-10-03T17-31-23.265550.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-03T17-31-23.265550.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-10-03T17-31-23.265550.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-10-03T17-31-23.265550.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-10-03T17-31-23.265550.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-10-03T17-31-23.265550.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-10-03T17-31-23.265550.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-10-03T17-31-23.265550.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-10-03T17-31-23.265550.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-10-03T17-31-23.265550.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-10-03T17-31-23.265550.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-10-03T17-31-23.265550.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-03T17-31-23.265550.parquet", "**/details_harness|hendrycksTest-virology|5_2023-10-03T17-31-23.265550.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-10-03T17-31-23.265550.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_10_03T17_31_23.265550", "path": 
["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-03T17-31-23.265550.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-03T17-31-23.265550.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_10_03T17_31_23.265550", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-10-03T17-31-23.265550.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-10-03T17-31-23.265550.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_10_03T17_31_23.265550", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-10-03T17-31-23.265550.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-10-03T17-31-23.265550.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_10_03T17_31_23.265550", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-10-03T17-31-23.265550.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-10-03T17-31-23.265550.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_10_03T17_31_23.265550", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-03T17-31-23.265550.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-03T17-31-23.265550.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_10_03T17_31_23.265550", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-10-03T17-31-23.265550.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-10-03T17-31-23.265550.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_10_03T17_31_23.265550", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-10-03T17-31-23.265550.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-10-03T17-31-23.265550.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_10_03T17_31_23.265550", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-10-03T17-31-23.265550.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-10-03T17-31-23.265550.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_10_03T17_31_23.265550", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-10-03T17-31-23.265550.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-10-03T17-31-23.265550.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_10_03T17_31_23.265550", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-10-03T17-31-23.265550.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-10-03T17-31-23.265550.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_10_03T17_31_23.265550", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-10-03T17-31-23.265550.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-college_physics|5_2023-10-03T17-31-23.265550.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_10_03T17_31_23.265550", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-10-03T17-31-23.265550.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-10-03T17-31-23.265550.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_10_03T17_31_23.265550", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-03T17-31-23.265550.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-03T17-31-23.265550.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_10_03T17_31_23.265550", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-10-03T17-31-23.265550.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-10-03T17-31-23.265550.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_10_03T17_31_23.265550", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-03T17-31-23.265550.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-03T17-31-23.265550.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_10_03T17_31_23.265550", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-03T17-31-23.265550.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-03T17-31-23.265550.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_10_03T17_31_23.265550", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-10-03T17-31-23.265550.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-10-03T17-31-23.265550.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_10_03T17_31_23.265550", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-10-03T17-31-23.265550.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-10-03T17-31-23.265550.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_10_03T17_31_23.265550", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-10-03T17-31-23.265550.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-10-03T17-31-23.265550.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_10_03T17_31_23.265550", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-03T17-31-23.265550.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-03T17-31-23.265550.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_10_03T17_31_23.265550", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-03T17-31-23.265550.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-03T17-31-23.265550.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_10_03T17_31_23.265550", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-03T17-31-23.265550.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-03T17-31-23.265550.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_10_03T17_31_23.265550", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-10-03T17-31-23.265550.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-10-03T17-31-23.265550.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_10_03T17_31_23.265550", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-03T17-31-23.265550.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-03T17-31-23.265550.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_10_03T17_31_23.265550", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-03T17-31-23.265550.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-03T17-31-23.265550.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_10_03T17_31_23.265550", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-03T17-31-23.265550.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-03T17-31-23.265550.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_10_03T17_31_23.265550", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-03T17-31-23.265550.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-03T17-31-23.265550.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_10_03T17_31_23.265550", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-10-03T17-31-23.265550.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-10-03T17-31-23.265550.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_10_03T17_31_23.265550", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-03T17-31-23.265550.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-03T17-31-23.265550.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_10_03T17_31_23.265550", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-03T17-31-23.265550.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-03T17-31-23.265550.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_10_03T17_31_23.265550", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-03T17-31-23.265550.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-03T17-31-23.265550.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_10_03T17_31_23.265550", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-03T17-31-23.265550.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-03T17-31-23.265550.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_10_03T17_31_23.265550", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-10-03T17-31-23.265550.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-10-03T17-31-23.265550.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_10_03T17_31_23.265550", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-10-03T17-31-23.265550.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-10-03T17-31-23.265550.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_10_03T17_31_23.265550", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-10-03T17-31-23.265550.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-10-03T17-31-23.265550.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_10_03T17_31_23.265550", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-10-03T17-31-23.265550.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-10-03T17-31-23.265550.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_10_03T17_31_23.265550", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-03T17-31-23.265550.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-03T17-31-23.265550.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_10_03T17_31_23.265550", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-10-03T17-31-23.265550.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-10-03T17-31-23.265550.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_10_03T17_31_23.265550", "path": ["**/details_harness|hendrycksTest-management|5_2023-10-03T17-31-23.265550.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-10-03T17-31-23.265550.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_10_03T17_31_23.265550", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-10-03T17-31-23.265550.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-10-03T17-31-23.265550.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_10_03T17_31_23.265550", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-10-03T17-31-23.265550.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-10-03T17-31-23.265550.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": 
"2023_10_03T17_31_23.265550", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-10-03T17-31-23.265550.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-10-03T17-31-23.265550.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_10_03T17_31_23.265550", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-10-03T17-31-23.265550.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-10-03T17-31-23.265550.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_10_03T17_31_23.265550", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-03T17-31-23.265550.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-03T17-31-23.265550.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_10_03T17_31_23.265550", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-10-03T17-31-23.265550.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-10-03T17-31-23.265550.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_10_03T17_31_23.265550", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-10-03T17-31-23.265550.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-10-03T17-31-23.265550.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_10_03T17_31_23.265550", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-10-03T17-31-23.265550.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-10-03T17-31-23.265550.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_10_03T17_31_23.265550", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-10-03T17-31-23.265550.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-10-03T17-31-23.265550.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_10_03T17_31_23.265550", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-10-03T17-31-23.265550.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-10-03T17-31-23.265550.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_10_03T17_31_23.265550", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-10-03T17-31-23.265550.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-10-03T17-31-23.265550.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_10_03T17_31_23.265550", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-10-03T17-31-23.265550.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-10-03T17-31-23.265550.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_10_03T17_31_23.265550", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-10-03T17-31-23.265550.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-public_relations|5_2023-10-03T17-31-23.265550.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_10_03T17_31_23.265550", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-10-03T17-31-23.265550.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-10-03T17-31-23.265550.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_10_03T17_31_23.265550", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-10-03T17-31-23.265550.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-10-03T17-31-23.265550.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_10_03T17_31_23.265550", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-03T17-31-23.265550.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-03T17-31-23.265550.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_10_03T17_31_23.265550", "path": ["**/details_harness|hendrycksTest-virology|5_2023-10-03T17-31-23.265550.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-10-03T17-31-23.265550.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_10_03T17_31_23.265550", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-10-03T17-31-23.265550.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-10-03T17-31-23.265550.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_10_03T17_31_23.265550", "path": ["**/details_harness|truthfulqa:mc|0_2023-10-03T17-31-23.265550.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-10-03T17-31-23.265550.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_10_23T18_27_52.311274", "path": ["**/details_harness|winogrande|5_2023-10-23T18-27-52.311274.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-10-23T18-27-52.311274.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_10_03T17_31_23.265550", "path": ["results_2023-10-03T17-31-23.265550.parquet"]}, {"split": "2023_10_23T18_27_52.311274", "path": ["results_2023-10-23T18-27-52.311274.parquet"]}, {"split": "latest", "path": ["results_2023-10-23T18-27-52.311274.parquet"]}]}]}
2023-10-23T17:28:04+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of Undi95/Emerald-13B ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model Undi95/Emerald-13B on the Open LLM Leaderboard. The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-10-23T18:27:52.311274 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
[ "# Dataset Card for Evaluation run of Undi95/Emerald-13B", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model Undi95/Emerald-13B on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-23T18:27:52.311274(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of Undi95/Emerald-13B", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model Undi95/Emerald-13B on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-23T18:27:52.311274(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 18, 31, 166, 67, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of Undi95/Emerald-13B## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model Undi95/Emerald-13B on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-10-23T18:27:52.311274(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
9f57e6a562ccbe594b428e795b242c58be067335
# Dataset Card for Evaluation run of Undi95/MXLewd-L2-20B ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/Undi95/MXLewd-L2-20B - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [Undi95/MXLewd-L2-20B](https://huggingface.co/Undi95/MXLewd-L2-20B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_Undi95__MXLewd-L2-20B", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-10-26T15:01:29.901026](https://huggingface.co/datasets/open-llm-leaderboard/details_Undi95__MXLewd-L2-20B/blob/main/results_2023-10-26T15-01-29.901026.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "em": 0.0759228187919463, "em_stderr": 0.002712563641278116, "f1": 0.1446036073825498, "f1_stderr": 0.0029538450645220115, "acc": 0.43501301181864477, "acc_stderr": 0.01028931557267752 }, "harness|drop|3": { "em": 0.0759228187919463, "em_stderr": 0.002712563641278116, "f1": 0.1446036073825498, "f1_stderr": 0.0029538450645220115 }, "harness|gsm8k|5": { "acc": 0.10917361637604246, "acc_stderr": 0.008590089300511132 }, "harness|winogrande|5": { "acc": 0.760852407261247, "acc_stderr": 0.01198854184484391 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
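Because this dataset was created from 2 run(s), each config carries one timestamped split per run plus a "latest" alias. A small sketch of enumerating those splits for the aggregated "results" config, assuming the split names declared in this card's metadata:

```python
from datasets import get_dataset_split_names

# One timestamped split per evaluation run, plus the "latest" alias.
for split in get_dataset_split_names("open-llm-leaderboard/details_Undi95__MXLewd-L2-20B",
                                     "results"):
    print(split)
```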
open-llm-leaderboard/details_Undi95__MXLewd-L2-20B
[ "region:us" ]
2023-10-03T16:32:36+00:00
{"pretty_name": "Evaluation run of Undi95/MXLewd-L2-20B", "dataset_summary": "Dataset automatically created during the evaluation run of model [Undi95/MXLewd-L2-20B](https://huggingface.co/Undi95/MXLewd-L2-20B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_Undi95__MXLewd-L2-20B\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-10-26T15:01:29.901026](https://huggingface.co/datasets/open-llm-leaderboard/details_Undi95__MXLewd-L2-20B/blob/main/results_2023-10-26T15-01-29.901026.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.0759228187919463,\n \"em_stderr\": 0.002712563641278116,\n \"f1\": 0.1446036073825498,\n \"f1_stderr\": 0.0029538450645220115,\n \"acc\": 0.43501301181864477,\n \"acc_stderr\": 0.01028931557267752\n },\n \"harness|drop|3\": {\n \"em\": 0.0759228187919463,\n \"em_stderr\": 0.002712563641278116,\n \"f1\": 0.1446036073825498,\n \"f1_stderr\": 0.0029538450645220115\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.10917361637604246,\n \"acc_stderr\": 0.008590089300511132\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.760852407261247,\n \"acc_stderr\": 0.01198854184484391\n }\n}\n```", "repo_url": "https://huggingface.co/Undi95/MXLewd-L2-20B", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_10_03T17_32_13.142085", "path": ["**/details_harness|arc:challenge|25_2023-10-03T17-32-13.142085.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-10-03T17-32-13.142085.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_10_26T15_01_29.901026", "path": ["**/details_harness|drop|3_2023-10-26T15-01-29.901026.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-10-26T15-01-29.901026.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_10_26T15_01_29.901026", "path": ["**/details_harness|gsm8k|5_2023-10-26T15-01-29.901026.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-10-26T15-01-29.901026.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_10_03T17_32_13.142085", "path": ["**/details_harness|hellaswag|10_2023-10-03T17-32-13.142085.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-10-03T17-32-13.142085.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_10_03T17_32_13.142085", 
"path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-03T17-32-13.142085.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-10-03T17-32-13.142085.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-10-03T17-32-13.142085.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-10-03T17-32-13.142085.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-03T17-32-13.142085.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-10-03T17-32-13.142085.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-10-03T17-32-13.142085.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-10-03T17-32-13.142085.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-10-03T17-32-13.142085.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-10-03T17-32-13.142085.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-10-03T17-32-13.142085.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-10-03T17-32-13.142085.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-03T17-32-13.142085.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-10-03T17-32-13.142085.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-03T17-32-13.142085.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-03T17-32-13.142085.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-10-03T17-32-13.142085.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-10-03T17-32-13.142085.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-10-03T17-32-13.142085.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-03T17-32-13.142085.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-03T17-32-13.142085.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-03T17-32-13.142085.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-10-03T17-32-13.142085.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-03T17-32-13.142085.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-03T17-32-13.142085.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-03T17-32-13.142085.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-03T17-32-13.142085.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-10-03T17-32-13.142085.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-03T17-32-13.142085.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-03T17-32-13.142085.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-03T17-32-13.142085.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-03T17-32-13.142085.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-10-03T17-32-13.142085.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-10-03T17-32-13.142085.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-10-03T17-32-13.142085.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-10-03T17-32-13.142085.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-03T17-32-13.142085.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-10-03T17-32-13.142085.parquet", 
"**/details_harness|hendrycksTest-management|5_2023-10-03T17-32-13.142085.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-10-03T17-32-13.142085.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-10-03T17-32-13.142085.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-10-03T17-32-13.142085.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-10-03T17-32-13.142085.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-03T17-32-13.142085.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-10-03T17-32-13.142085.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-10-03T17-32-13.142085.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-10-03T17-32-13.142085.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-10-03T17-32-13.142085.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-10-03T17-32-13.142085.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-10-03T17-32-13.142085.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-10-03T17-32-13.142085.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-10-03T17-32-13.142085.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-10-03T17-32-13.142085.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-10-03T17-32-13.142085.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-03T17-32-13.142085.parquet", "**/details_harness|hendrycksTest-virology|5_2023-10-03T17-32-13.142085.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-10-03T17-32-13.142085.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-03T17-32-13.142085.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-10-03T17-32-13.142085.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-10-03T17-32-13.142085.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-10-03T17-32-13.142085.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-03T17-32-13.142085.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-10-03T17-32-13.142085.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-10-03T17-32-13.142085.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-10-03T17-32-13.142085.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-10-03T17-32-13.142085.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-10-03T17-32-13.142085.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-10-03T17-32-13.142085.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-10-03T17-32-13.142085.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-03T17-32-13.142085.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-10-03T17-32-13.142085.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-03T17-32-13.142085.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-03T17-32-13.142085.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-10-03T17-32-13.142085.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-10-03T17-32-13.142085.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-10-03T17-32-13.142085.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-03T17-32-13.142085.parquet", 
"**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-03T17-32-13.142085.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-03T17-32-13.142085.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-10-03T17-32-13.142085.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-03T17-32-13.142085.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-03T17-32-13.142085.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-03T17-32-13.142085.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-03T17-32-13.142085.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-10-03T17-32-13.142085.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-03T17-32-13.142085.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-03T17-32-13.142085.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-03T17-32-13.142085.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-03T17-32-13.142085.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-10-03T17-32-13.142085.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-10-03T17-32-13.142085.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-10-03T17-32-13.142085.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-10-03T17-32-13.142085.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-03T17-32-13.142085.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-10-03T17-32-13.142085.parquet", "**/details_harness|hendrycksTest-management|5_2023-10-03T17-32-13.142085.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-10-03T17-32-13.142085.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-10-03T17-32-13.142085.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-10-03T17-32-13.142085.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-10-03T17-32-13.142085.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-03T17-32-13.142085.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-10-03T17-32-13.142085.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-10-03T17-32-13.142085.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-10-03T17-32-13.142085.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-10-03T17-32-13.142085.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-10-03T17-32-13.142085.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-10-03T17-32-13.142085.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-10-03T17-32-13.142085.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-10-03T17-32-13.142085.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-10-03T17-32-13.142085.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-10-03T17-32-13.142085.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-03T17-32-13.142085.parquet", "**/details_harness|hendrycksTest-virology|5_2023-10-03T17-32-13.142085.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-10-03T17-32-13.142085.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_10_03T17_32_13.142085", "path": 
["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-03T17-32-13.142085.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-03T17-32-13.142085.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_10_03T17_32_13.142085", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-10-03T17-32-13.142085.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-10-03T17-32-13.142085.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_10_03T17_32_13.142085", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-10-03T17-32-13.142085.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-10-03T17-32-13.142085.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_10_03T17_32_13.142085", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-10-03T17-32-13.142085.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-10-03T17-32-13.142085.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_10_03T17_32_13.142085", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-03T17-32-13.142085.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-03T17-32-13.142085.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_10_03T17_32_13.142085", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-10-03T17-32-13.142085.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-10-03T17-32-13.142085.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_10_03T17_32_13.142085", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-10-03T17-32-13.142085.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-10-03T17-32-13.142085.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_10_03T17_32_13.142085", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-10-03T17-32-13.142085.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-10-03T17-32-13.142085.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_10_03T17_32_13.142085", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-10-03T17-32-13.142085.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-10-03T17-32-13.142085.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_10_03T17_32_13.142085", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-10-03T17-32-13.142085.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-10-03T17-32-13.142085.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_10_03T17_32_13.142085", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-10-03T17-32-13.142085.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-college_physics|5_2023-10-03T17-32-13.142085.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_10_03T17_32_13.142085", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-10-03T17-32-13.142085.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-10-03T17-32-13.142085.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_10_03T17_32_13.142085", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-03T17-32-13.142085.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-03T17-32-13.142085.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_10_03T17_32_13.142085", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-10-03T17-32-13.142085.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-10-03T17-32-13.142085.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_10_03T17_32_13.142085", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-03T17-32-13.142085.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-03T17-32-13.142085.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_10_03T17_32_13.142085", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-03T17-32-13.142085.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-03T17-32-13.142085.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_10_03T17_32_13.142085", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-10-03T17-32-13.142085.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-10-03T17-32-13.142085.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_10_03T17_32_13.142085", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-10-03T17-32-13.142085.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-10-03T17-32-13.142085.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_10_03T17_32_13.142085", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-10-03T17-32-13.142085.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-10-03T17-32-13.142085.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_10_03T17_32_13.142085", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-03T17-32-13.142085.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-03T17-32-13.142085.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_10_03T17_32_13.142085", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-03T17-32-13.142085.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-03T17-32-13.142085.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_10_03T17_32_13.142085", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-03T17-32-13.142085.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-03T17-32-13.142085.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_10_03T17_32_13.142085", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-10-03T17-32-13.142085.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-10-03T17-32-13.142085.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_10_03T17_32_13.142085", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-03T17-32-13.142085.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-03T17-32-13.142085.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_10_03T17_32_13.142085", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-03T17-32-13.142085.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-03T17-32-13.142085.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_10_03T17_32_13.142085", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-03T17-32-13.142085.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-03T17-32-13.142085.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_10_03T17_32_13.142085", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-03T17-32-13.142085.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-03T17-32-13.142085.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_10_03T17_32_13.142085", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-10-03T17-32-13.142085.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-10-03T17-32-13.142085.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_10_03T17_32_13.142085", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-03T17-32-13.142085.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-03T17-32-13.142085.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_10_03T17_32_13.142085", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-03T17-32-13.142085.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-03T17-32-13.142085.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_10_03T17_32_13.142085", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-03T17-32-13.142085.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-03T17-32-13.142085.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_10_03T17_32_13.142085", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-03T17-32-13.142085.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-03T17-32-13.142085.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_10_03T17_32_13.142085", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-10-03T17-32-13.142085.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-10-03T17-32-13.142085.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_10_03T17_32_13.142085", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-10-03T17-32-13.142085.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-10-03T17-32-13.142085.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_10_03T17_32_13.142085", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-10-03T17-32-13.142085.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-10-03T17-32-13.142085.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_10_03T17_32_13.142085", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-10-03T17-32-13.142085.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-10-03T17-32-13.142085.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_10_03T17_32_13.142085", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-03T17-32-13.142085.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-03T17-32-13.142085.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_10_03T17_32_13.142085", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-10-03T17-32-13.142085.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-10-03T17-32-13.142085.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_10_03T17_32_13.142085", "path": ["**/details_harness|hendrycksTest-management|5_2023-10-03T17-32-13.142085.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-10-03T17-32-13.142085.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_10_03T17_32_13.142085", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-10-03T17-32-13.142085.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-10-03T17-32-13.142085.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_10_03T17_32_13.142085", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-10-03T17-32-13.142085.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-10-03T17-32-13.142085.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": 
"2023_10_03T17_32_13.142085", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-10-03T17-32-13.142085.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-10-03T17-32-13.142085.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_10_03T17_32_13.142085", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-10-03T17-32-13.142085.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-10-03T17-32-13.142085.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_10_03T17_32_13.142085", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-03T17-32-13.142085.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-03T17-32-13.142085.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_10_03T17_32_13.142085", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-10-03T17-32-13.142085.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-10-03T17-32-13.142085.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_10_03T17_32_13.142085", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-10-03T17-32-13.142085.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-10-03T17-32-13.142085.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_10_03T17_32_13.142085", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-10-03T17-32-13.142085.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-10-03T17-32-13.142085.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_10_03T17_32_13.142085", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-10-03T17-32-13.142085.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-10-03T17-32-13.142085.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_10_03T17_32_13.142085", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-10-03T17-32-13.142085.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-10-03T17-32-13.142085.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_10_03T17_32_13.142085", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-10-03T17-32-13.142085.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-10-03T17-32-13.142085.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_10_03T17_32_13.142085", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-10-03T17-32-13.142085.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-10-03T17-32-13.142085.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_10_03T17_32_13.142085", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-10-03T17-32-13.142085.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-public_relations|5_2023-10-03T17-32-13.142085.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_10_03T17_32_13.142085", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-10-03T17-32-13.142085.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-10-03T17-32-13.142085.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_10_03T17_32_13.142085", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-10-03T17-32-13.142085.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-10-03T17-32-13.142085.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_10_03T17_32_13.142085", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-03T17-32-13.142085.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-03T17-32-13.142085.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_10_03T17_32_13.142085", "path": ["**/details_harness|hendrycksTest-virology|5_2023-10-03T17-32-13.142085.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-10-03T17-32-13.142085.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_10_03T17_32_13.142085", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-10-03T17-32-13.142085.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-10-03T17-32-13.142085.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_10_03T17_32_13.142085", "path": ["**/details_harness|truthfulqa:mc|0_2023-10-03T17-32-13.142085.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-10-03T17-32-13.142085.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_10_26T15_01_29.901026", "path": ["**/details_harness|winogrande|5_2023-10-26T15-01-29.901026.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-10-26T15-01-29.901026.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_10_03T17_32_13.142085", "path": ["results_2023-10-03T17-32-13.142085.parquet"]}, {"split": "2023_10_26T15_01_29.901026", "path": ["results_2023-10-26T15-01-29.901026.parquet"]}, {"split": "latest", "path": ["results_2023-10-26T15-01-29.901026.parquet"]}]}]}
2023-10-26T14:01:43+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of Undi95/MXLewd-L2-20B

## Dataset Description

- Homepage: 
- Repository: URL
- Paper: 
- Leaderboard: URL
- Point of Contact: clementine@URL

### Dataset Summary

Dataset automatically created during the evaluation run of model Undi95/MXLewd-L2-20B on the Open LLM Leaderboard.

The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).

To load the details from a run, you can for instance do the following:

## Latest results

These are the latest results from run 2023-10-26T15:01:29.901026 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks; you can find each one in the "results" and the "latest" split for each eval):

### Supported Tasks and Leaderboards

### Languages

## Dataset Structure

### Data Instances

### Data Fields

### Data Splits

## Dataset Creation

### Curation Rationale

### Source Data

#### Initial Data Collection and Normalization

#### Who are the source language producers?

### Annotations

#### Annotation process

#### Who are the annotators?

### Personal and Sensitive Information

## Considerations for Using the Data

### Social Impact of Dataset

### Discussion of Biases

### Other Known Limitations

## Additional Information

### Dataset Curators

### Licensing Information

### Contributions
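The "Latest results" sections above report each accuracy together with a standard error (see the JSON excerpt earlier in this record). A short worked sketch may help interpret those fields; this is a generic normal-approximation interval computed from the reported numbers, not something the evaluation harness itself emits:

```python
# 95% confidence interval from a reported mean and standard error,
# using the winogrande figures from the latest MXLewd-L2-20B run.
acc, acc_stderr = 0.760852407261247, 0.01198854184484391

low, high = acc - 1.96 * acc_stderr, acc + 1.96 * acc_stderr
print(f"winogrande acc: {acc:.3f} (95% CI: {low:.3f}..{high:.3f})")
# -> winogrande acc: 0.761 (95% CI: 0.737..0.784)
```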
[ "# Dataset Card for Evaluation run of Undi95/MXLewd-L2-20B", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model Undi95/MXLewd-L2-20B on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-26T15:01:29.901026(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of Undi95/MXLewd-L2-20B", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model Undi95/MXLewd-L2-20B on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-26T15:01:29.901026(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 21, 31, 169, 67, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of Undi95/MXLewd-L2-20B## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model Undi95/MXLewd-L2-20B on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-10-26T15:01:29.901026(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
f2ccb3b1a7694348701aa39c653f9d0ed847f5f9
# Dataset Card for Evaluation run of Undi95/Amethyst-13B

## Dataset Description

- **Homepage:** 
- **Repository:** https://huggingface.co/Undi95/Amethyst-13B
- **Paper:** 
- **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
- **Point of Contact:** [email protected]

### Dataset Summary

Dataset automatically created during the evaluation run of model [Undi95/Amethyst-13B](https://huggingface.co/Undi95/Amethyst-13B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).

The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).

To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_Undi95__Amethyst-13B",
    "harness_winogrande_5",
    split="train")
```

## Latest results

These are the [latest results from run 2023-10-25T11:51:19.859333](https://huggingface.co/datasets/open-llm-leaderboard/details_Undi95__Amethyst-13B/blob/main/results_2023-10-25T11-51-19.859333.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks; you can find each one in the "results" and the "latest" split for each eval):

```python
{
    "all": {
        "em": 0.11891778523489933,
        "em_stderr": 0.003314906435546502,
        "f1": 0.18699769295301977,
        "f1_stderr": 0.0034428005809407332,
        "acc": 0.42792517590937623,
        "acc_stderr": 0.010387500478010799
    },
    "harness|drop|3": {
        "em": 0.11891778523489933,
        "em_stderr": 0.003314906435546502,
        "f1": 0.18699769295301977,
        "f1_stderr": 0.0034428005809407332
    },
    "harness|gsm8k|5": {
        "acc": 0.10841546626231995,
        "acc_stderr": 0.008563852506627492
    },
    "harness|winogrande|5": {
        "acc": 0.7474348855564326,
        "acc_stderr": 0.012211148449394105
    }
}
```

### Supported Tasks and Leaderboards

[More Information Needed]

### Languages

[More Information Needed]

## Dataset Structure

### Data Instances

[More Information Needed]

### Data Fields

[More Information Needed]

### Data Splits

[More Information Needed]

## Dataset Creation

### Curation Rationale

[More Information Needed]

### Source Data

#### Initial Data Collection and Normalization

[More Information Needed]

#### Who are the source language producers?

[More Information Needed]

### Annotations

#### Annotation process

[More Information Needed]

#### Who are the annotators?

[More Information Needed]

### Personal and Sensitive Information

[More Information Needed]

## Considerations for Using the Data

### Social Impact of Dataset

[More Information Needed]

### Discussion of Biases

[More Information Needed]

### Other Known Limitations

[More Information Needed]

## Additional Information

### Dataset Curators

[More Information Needed]

### Licensing Information

[More Information Needed]

### Citation Information

[More Information Needed]

### Contributions

[More Information Needed]
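Besides "train" and "latest", each run is also reachable under a split named after its timestamp, as recorded in the config metadata below. Here is a minimal sketch (the configuration and split names are taken from that metadata; the per-example column layout is not documented in this card, so the example only compares sizes):

```python
from datasets import load_dataset

REPO = "open-llm-leaderboard/details_Undi95__Amethyst-13B"

# "latest" mirrors the most recent run of this task...
latest = load_dataset(REPO, "harness_gsm8k_5", split="latest")

# ...and the same run is also addressable by its timestamped split name.
run = load_dataset(REPO, "harness_gsm8k_5", split="2023_10_25T11_51_19.859333")

print(len(latest), len(run))  # identical here, since only one run covered gsm8k
```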
open-llm-leaderboard/details_Undi95__Amethyst-13B
[ "region:us" ]
2023-10-03T16:38:00+00:00
{"pretty_name": "Evaluation run of Undi95/Amethyst-13B", "dataset_summary": "Dataset automatically created during the evaluation run of model [Undi95/Amethyst-13B](https://huggingface.co/Undi95/Amethyst-13B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_Undi95__Amethyst-13B\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-10-25T11:51:19.859333](https://huggingface.co/datasets/open-llm-leaderboard/details_Undi95__Amethyst-13B/blob/main/results_2023-10-25T11-51-19.859333.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.11891778523489933,\n \"em_stderr\": 0.003314906435546502,\n \"f1\": 0.18699769295301977,\n \"f1_stderr\": 0.0034428005809407332,\n \"acc\": 0.42792517590937623,\n \"acc_stderr\": 0.010387500478010799\n },\n \"harness|drop|3\": {\n \"em\": 0.11891778523489933,\n \"em_stderr\": 0.003314906435546502,\n \"f1\": 0.18699769295301977,\n \"f1_stderr\": 0.0034428005809407332\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.10841546626231995,\n \"acc_stderr\": 0.008563852506627492\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7474348855564326,\n \"acc_stderr\": 0.012211148449394105\n }\n}\n```", "repo_url": "https://huggingface.co/Undi95/Amethyst-13B", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_10_03T17_37_36.187420", "path": ["**/details_harness|arc:challenge|25_2023-10-03T17-37-36.187420.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-10-03T17-37-36.187420.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_10_25T11_51_19.859333", "path": ["**/details_harness|drop|3_2023-10-25T11-51-19.859333.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-10-25T11-51-19.859333.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_10_25T11_51_19.859333", "path": ["**/details_harness|gsm8k|5_2023-10-25T11-51-19.859333.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-10-25T11-51-19.859333.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_10_03T17_37_36.187420", "path": ["**/details_harness|hellaswag|10_2023-10-03T17-37-36.187420.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-10-03T17-37-36.187420.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_10_03T17_37_36.187420", 
"path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-03T17-37-36.187420.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-10-03T17-37-36.187420.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-10-03T17-37-36.187420.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-10-03T17-37-36.187420.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-03T17-37-36.187420.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-10-03T17-37-36.187420.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-10-03T17-37-36.187420.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-10-03T17-37-36.187420.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-10-03T17-37-36.187420.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-10-03T17-37-36.187420.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-10-03T17-37-36.187420.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-10-03T17-37-36.187420.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-03T17-37-36.187420.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-10-03T17-37-36.187420.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-03T17-37-36.187420.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-03T17-37-36.187420.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-10-03T17-37-36.187420.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-10-03T17-37-36.187420.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-10-03T17-37-36.187420.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-03T17-37-36.187420.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-03T17-37-36.187420.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-03T17-37-36.187420.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-10-03T17-37-36.187420.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-03T17-37-36.187420.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-03T17-37-36.187420.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-03T17-37-36.187420.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-03T17-37-36.187420.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-10-03T17-37-36.187420.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-03T17-37-36.187420.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-03T17-37-36.187420.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-03T17-37-36.187420.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-03T17-37-36.187420.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-10-03T17-37-36.187420.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-10-03T17-37-36.187420.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-10-03T17-37-36.187420.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-10-03T17-37-36.187420.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-03T17-37-36.187420.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-10-03T17-37-36.187420.parquet", 
"**/details_harness|hendrycksTest-management|5_2023-10-03T17-37-36.187420.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-10-03T17-37-36.187420.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-10-03T17-37-36.187420.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-10-03T17-37-36.187420.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-10-03T17-37-36.187420.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-03T17-37-36.187420.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-10-03T17-37-36.187420.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-10-03T17-37-36.187420.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-10-03T17-37-36.187420.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-10-03T17-37-36.187420.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-10-03T17-37-36.187420.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-10-03T17-37-36.187420.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-10-03T17-37-36.187420.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-10-03T17-37-36.187420.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-10-03T17-37-36.187420.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-10-03T17-37-36.187420.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-03T17-37-36.187420.parquet", "**/details_harness|hendrycksTest-virology|5_2023-10-03T17-37-36.187420.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-10-03T17-37-36.187420.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-03T17-37-36.187420.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-10-03T17-37-36.187420.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-10-03T17-37-36.187420.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-10-03T17-37-36.187420.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-03T17-37-36.187420.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-10-03T17-37-36.187420.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-10-03T17-37-36.187420.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-10-03T17-37-36.187420.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-10-03T17-37-36.187420.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-10-03T17-37-36.187420.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-10-03T17-37-36.187420.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-10-03T17-37-36.187420.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-03T17-37-36.187420.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-10-03T17-37-36.187420.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-03T17-37-36.187420.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-03T17-37-36.187420.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-10-03T17-37-36.187420.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-10-03T17-37-36.187420.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-10-03T17-37-36.187420.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-03T17-37-36.187420.parquet", 
"**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-03T17-37-36.187420.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-03T17-37-36.187420.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-10-03T17-37-36.187420.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-03T17-37-36.187420.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-03T17-37-36.187420.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-03T17-37-36.187420.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-03T17-37-36.187420.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-10-03T17-37-36.187420.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-03T17-37-36.187420.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-03T17-37-36.187420.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-03T17-37-36.187420.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-03T17-37-36.187420.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-10-03T17-37-36.187420.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-10-03T17-37-36.187420.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-10-03T17-37-36.187420.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-10-03T17-37-36.187420.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-03T17-37-36.187420.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-10-03T17-37-36.187420.parquet", "**/details_harness|hendrycksTest-management|5_2023-10-03T17-37-36.187420.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-10-03T17-37-36.187420.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-10-03T17-37-36.187420.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-10-03T17-37-36.187420.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-10-03T17-37-36.187420.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-03T17-37-36.187420.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-10-03T17-37-36.187420.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-10-03T17-37-36.187420.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-10-03T17-37-36.187420.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-10-03T17-37-36.187420.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-10-03T17-37-36.187420.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-10-03T17-37-36.187420.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-10-03T17-37-36.187420.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-10-03T17-37-36.187420.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-10-03T17-37-36.187420.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-10-03T17-37-36.187420.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-03T17-37-36.187420.parquet", "**/details_harness|hendrycksTest-virology|5_2023-10-03T17-37-36.187420.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-10-03T17-37-36.187420.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_10_03T17_37_36.187420", "path": 
["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-03T17-37-36.187420.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-03T17-37-36.187420.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_10_03T17_37_36.187420", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-10-03T17-37-36.187420.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-10-03T17-37-36.187420.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_10_03T17_37_36.187420", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-10-03T17-37-36.187420.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-10-03T17-37-36.187420.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_10_03T17_37_36.187420", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-10-03T17-37-36.187420.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-10-03T17-37-36.187420.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_10_03T17_37_36.187420", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-03T17-37-36.187420.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-03T17-37-36.187420.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_10_03T17_37_36.187420", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-10-03T17-37-36.187420.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-10-03T17-37-36.187420.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_10_03T17_37_36.187420", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-10-03T17-37-36.187420.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-10-03T17-37-36.187420.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_10_03T17_37_36.187420", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-10-03T17-37-36.187420.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-10-03T17-37-36.187420.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_10_03T17_37_36.187420", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-10-03T17-37-36.187420.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-10-03T17-37-36.187420.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_10_03T17_37_36.187420", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-10-03T17-37-36.187420.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-10-03T17-37-36.187420.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_10_03T17_37_36.187420", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-10-03T17-37-36.187420.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-college_physics|5_2023-10-03T17-37-36.187420.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_10_03T17_37_36.187420", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-10-03T17-37-36.187420.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-10-03T17-37-36.187420.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_10_03T17_37_36.187420", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-03T17-37-36.187420.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-03T17-37-36.187420.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_10_03T17_37_36.187420", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-10-03T17-37-36.187420.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-10-03T17-37-36.187420.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_10_03T17_37_36.187420", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-03T17-37-36.187420.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-03T17-37-36.187420.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_10_03T17_37_36.187420", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-03T17-37-36.187420.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-03T17-37-36.187420.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_10_03T17_37_36.187420", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-10-03T17-37-36.187420.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-10-03T17-37-36.187420.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_10_03T17_37_36.187420", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-10-03T17-37-36.187420.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-10-03T17-37-36.187420.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_10_03T17_37_36.187420", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-10-03T17-37-36.187420.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-10-03T17-37-36.187420.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_10_03T17_37_36.187420", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-03T17-37-36.187420.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-03T17-37-36.187420.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_10_03T17_37_36.187420", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-03T17-37-36.187420.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-03T17-37-36.187420.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_10_03T17_37_36.187420", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-03T17-37-36.187420.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-03T17-37-36.187420.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_10_03T17_37_36.187420", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-10-03T17-37-36.187420.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-10-03T17-37-36.187420.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_10_03T17_37_36.187420", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-03T17-37-36.187420.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-03T17-37-36.187420.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_10_03T17_37_36.187420", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-03T17-37-36.187420.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-03T17-37-36.187420.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_10_03T17_37_36.187420", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-03T17-37-36.187420.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-03T17-37-36.187420.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_10_03T17_37_36.187420", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-03T17-37-36.187420.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-03T17-37-36.187420.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_10_03T17_37_36.187420", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-10-03T17-37-36.187420.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-10-03T17-37-36.187420.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_10_03T17_37_36.187420", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-03T17-37-36.187420.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-03T17-37-36.187420.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_10_03T17_37_36.187420", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-03T17-37-36.187420.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-03T17-37-36.187420.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_10_03T17_37_36.187420", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-03T17-37-36.187420.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-03T17-37-36.187420.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_10_03T17_37_36.187420", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-03T17-37-36.187420.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-03T17-37-36.187420.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_10_03T17_37_36.187420", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-10-03T17-37-36.187420.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-10-03T17-37-36.187420.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_10_03T17_37_36.187420", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-10-03T17-37-36.187420.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-10-03T17-37-36.187420.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_10_03T17_37_36.187420", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-10-03T17-37-36.187420.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-10-03T17-37-36.187420.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_10_03T17_37_36.187420", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-10-03T17-37-36.187420.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-10-03T17-37-36.187420.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_10_03T17_37_36.187420", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-03T17-37-36.187420.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-03T17-37-36.187420.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_10_03T17_37_36.187420", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-10-03T17-37-36.187420.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-10-03T17-37-36.187420.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_10_03T17_37_36.187420", "path": ["**/details_harness|hendrycksTest-management|5_2023-10-03T17-37-36.187420.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-10-03T17-37-36.187420.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_10_03T17_37_36.187420", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-10-03T17-37-36.187420.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-10-03T17-37-36.187420.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_10_03T17_37_36.187420", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-10-03T17-37-36.187420.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-10-03T17-37-36.187420.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": 
"2023_10_03T17_37_36.187420", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-10-03T17-37-36.187420.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-10-03T17-37-36.187420.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_10_03T17_37_36.187420", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-10-03T17-37-36.187420.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-10-03T17-37-36.187420.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_10_03T17_37_36.187420", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-03T17-37-36.187420.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-03T17-37-36.187420.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_10_03T17_37_36.187420", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-10-03T17-37-36.187420.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-10-03T17-37-36.187420.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_10_03T17_37_36.187420", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-10-03T17-37-36.187420.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-10-03T17-37-36.187420.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_10_03T17_37_36.187420", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-10-03T17-37-36.187420.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-10-03T17-37-36.187420.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_10_03T17_37_36.187420", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-10-03T17-37-36.187420.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-10-03T17-37-36.187420.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_10_03T17_37_36.187420", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-10-03T17-37-36.187420.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-10-03T17-37-36.187420.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_10_03T17_37_36.187420", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-10-03T17-37-36.187420.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-10-03T17-37-36.187420.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_10_03T17_37_36.187420", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-10-03T17-37-36.187420.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-10-03T17-37-36.187420.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_10_03T17_37_36.187420", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-10-03T17-37-36.187420.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-public_relations|5_2023-10-03T17-37-36.187420.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_10_03T17_37_36.187420", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-10-03T17-37-36.187420.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-10-03T17-37-36.187420.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_10_03T17_37_36.187420", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-10-03T17-37-36.187420.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-10-03T17-37-36.187420.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_10_03T17_37_36.187420", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-03T17-37-36.187420.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-03T17-37-36.187420.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_10_03T17_37_36.187420", "path": ["**/details_harness|hendrycksTest-virology|5_2023-10-03T17-37-36.187420.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-10-03T17-37-36.187420.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_10_03T17_37_36.187420", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-10-03T17-37-36.187420.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-10-03T17-37-36.187420.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_10_03T17_37_36.187420", "path": ["**/details_harness|truthfulqa:mc|0_2023-10-03T17-37-36.187420.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-10-03T17-37-36.187420.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_10_25T11_51_19.859333", "path": ["**/details_harness|winogrande|5_2023-10-25T11-51-19.859333.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-10-25T11-51-19.859333.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_10_03T17_37_36.187420", "path": ["results_2023-10-03T17-37-36.187420.parquet"]}, {"split": "2023_10_25T11_51_19.859333", "path": ["results_2023-10-25T11-51-19.859333.parquet"]}, {"split": "latest", "path": ["results_2023-10-25T11-51-19.859333.parquet"]}]}]}
2023-10-25T10:51:32+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of Undi95/Amethyst-13B ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model Undi95/Amethyst-13B on the Open LLM Leaderboard. The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-10-25T11:51:19.859333 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
[ "# Dataset Card for Evaluation run of Undi95/Amethyst-13B", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model Undi95/Amethyst-13B on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-25T11:51:19.859333(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of Undi95/Amethyst-13B", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model Undi95/Amethyst-13B on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-25T11:51:19.859333(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 18, 31, 166, 66, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of Undi95/Amethyst-13B## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model Undi95/Amethyst-13B on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-10-25T11:51:19.859333(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
ee2735d5c6f0660f9bd7f691b8745b1640ffbfc0
# Dataset Card for Evaluation run of Undi95/MM-ReMM-L2-20B ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/Undi95/MM-ReMM-L2-20B - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [Undi95/MM-ReMM-L2-20B](https://huggingface.co/Undi95/MM-ReMM-L2-20B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 3 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_Undi95__MM-ReMM-L2-20B_public", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-11-07T18:30:54.641369](https://huggingface.co/datasets/open-llm-leaderboard/details_Undi95__MM-ReMM-L2-20B_public/blob/main/results_2023-11-07T18-30-54.641369.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "em": 0.10843120805369127, "em_stderr": 0.0031841552068159317, "f1": 0.18655725671140816, "f1_stderr": 0.003391655697712374, "acc": 0.4175133274651996, "acc_stderr": 0.009700033024698568 }, "harness|drop|3": { "em": 0.10843120805369127, "em_stderr": 0.0031841552068159317, "f1": 0.18655725671140816, "f1_stderr": 0.003391655697712374 }, "harness|gsm8k|5": { "acc": 0.07733131159969674, "acc_stderr": 0.00735771352322235 }, "harness|winogrande|5": { "acc": 0.7576953433307024, "acc_stderr": 0.012042352526174785 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
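As a complement to the per-task snippet in the card above, the following is a minimal sketch of pulling the aggregated metrics instead. The config name "results" and the split name "latest" are taken from the card and the config metadata below; that the "results" configuration loads the same way as the task configurations is an assumption, not something the card guarantees.

```python
from datasets import load_dataset

# Load the aggregated "results" configuration; per the card, the "latest"
# split always points at the most recent evaluation run.
results = load_dataset(
    "open-llm-leaderboard/details_Undi95__MM-ReMM-L2-20B_public",
    "results",
    split="latest",
)

# Inspect the stored aggregate metrics (em/f1 for drop, acc for gsm8k
# and winogrande, as shown in the "Latest results" block above).
print(results)
```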
open-llm-leaderboard/details_Undi95__MM-ReMM-L2-20B
[ "region:us" ]
2023-10-03T16:39:48+00:00
{"pretty_name": "Evaluation run of Undi95/MM-ReMM-L2-20B", "dataset_summary": "Dataset automatically created during the evaluation run of model [Undi95/MM-ReMM-L2-20B](https://huggingface.co/Undi95/MM-ReMM-L2-20B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 3 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_Undi95__MM-ReMM-L2-20B_public\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-11-07T18:30:54.641369](https://huggingface.co/datasets/open-llm-leaderboard/details_Undi95__MM-ReMM-L2-20B_public/blob/main/results_2023-11-07T18-30-54.641369.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.10843120805369127,\n \"em_stderr\": 0.0031841552068159317,\n \"f1\": 0.18655725671140816,\n \"f1_stderr\": 0.003391655697712374,\n \"acc\": 0.4175133274651996,\n \"acc_stderr\": 0.009700033024698568\n },\n \"harness|drop|3\": {\n \"em\": 0.10843120805369127,\n \"em_stderr\": 0.0031841552068159317,\n \"f1\": 0.18655725671140816,\n \"f1_stderr\": 0.003391655697712374\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.07733131159969674,\n \"acc_stderr\": 0.00735771352322235\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7576953433307024,\n \"acc_stderr\": 0.012042352526174785\n }\n}\n```", "repo_url": "https://huggingface.co/Undi95/MM-ReMM-L2-20B", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_drop_3", "data_files": [{"split": "2023_11_07T18_30_54.641369", "path": ["**/details_harness|drop|3_2023-11-07T18-30-54.641369.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-11-07T18-30-54.641369.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_11_07T18_30_54.641369", "path": ["**/details_harness|gsm8k|5_2023-11-07T18-30-54.641369.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-11-07T18-30-54.641369.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_11_07T18_30_54.641369", "path": ["**/details_harness|winogrande|5_2023-11-07T18-30-54.641369.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-11-07T18-30-54.641369.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_11_07T18_30_54.641369", "path": ["results_2023-11-07T18-30-54.641369.parquet"]}, {"split": "latest", "path": ["results_2023-11-07T18-30-54.641369.parquet"]}]}]}
2023-12-01T14:43:36+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of Undi95/MM-ReMM-L2-20B ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model Undi95/MM-ReMM-L2-20B on the Open LLM Leaderboard. The dataset is composed of 3 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-11-07T18:30:54.641369 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
[ "# Dataset Card for Evaluation run of Undi95/MM-ReMM-L2-20B", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model Undi95/MM-ReMM-L2-20B on the Open LLM Leaderboard.\n\nThe dataset is composed of 3 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-11-07T18:30:54.641369(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of Undi95/MM-ReMM-L2-20B", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model Undi95/MM-ReMM-L2-20B on the Open LLM Leaderboard.\n\nThe dataset is composed of 3 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-11-07T18:30:54.641369(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 22, 31, 171, 67, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of Undi95/MM-ReMM-L2-20B## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model Undi95/MM-ReMM-L2-20B on the Open LLM Leaderboard.\n\nThe dataset is composed of 3 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-11-07T18:30:54.641369(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
bee9b1c1a86225ef01f5913617a121df8f2e3554
# Dataset Card for "govreport-qa-5-2048" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
shossain/govreport-qa-5-2048
[ "region:us" ]
2023-10-03T16:47:37+00:00
{"dataset_info": {"features": [{"name": "input_ids", "sequence": "int32"}, {"name": "attention_mask", "sequence": "int8"}, {"name": "labels", "sequence": "int64"}], "splits": [{"name": "train", "num_bytes": 133180, "num_examples": 5}], "download_size": 45937, "dataset_size": 133180}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]}
2023-10-03T16:47:39+00:00
[]
[]
TAGS #region-us
# Dataset Card for "govreport-qa-5-2048" More Information needed
[ "# Dataset Card for \"govreport-qa-5-2048\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"govreport-qa-5-2048\"\n\nMore Information needed" ]
[ 6, 17 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"govreport-qa-5-2048\"\n\nMore Information needed" ]
757af029c1130f8aa280f06045c56597846a9833
# Dataset Card for Evaluation run of PygmalionAI/pygmalion-2-13b ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/PygmalionAI/pygmalion-2-13b - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [PygmalionAI/pygmalion-2-13b](https://huggingface.co/PygmalionAI/pygmalion-2-13b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_PygmalionAI__pygmalion-2-13b", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-10-23T10:43:41.239191](https://huggingface.co/datasets/open-llm-leaderboard/details_PygmalionAI__pygmalion-2-13b/blob/main/results_2023-10-23T10-43-41.239191.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "em": 0.0016778523489932886, "em_stderr": 0.0004191330178826867, "f1": 0.06134752516778516, "f1_stderr": 0.0013751962272216705, "acc": 0.4490486622270704, "acc_stderr": 0.010250799808548879 }, "harness|drop|3": { "em": 0.0016778523489932886, "em_stderr": 0.0004191330178826867, "f1": 0.06134752516778516, "f1_stderr": 0.0013751962272216705 }, "harness|gsm8k|5": { "acc": 0.11751326762699014, "acc_stderr": 0.008870331256489975 }, "harness|winogrande|5": { "acc": 0.7805840568271507, "acc_stderr": 0.01163126836060778 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
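Beyond the "train"/"latest" pointers, each run is also addressable by its timestamped split, as the summary above describes. A minimal sketch of pinning a specific run; the config name and split name are copied from the config metadata recorded below, and anything beyond those names is an assumption:

```python
from datasets import load_dataset

# Pin a specific evaluation run by its timestamp split instead of "latest";
# this split name comes from the harness_arc_challenge_25 config below.
data = load_dataset(
    "open-llm-leaderboard/details_PygmalionAI__pygmalion-2-13b",
    "harness_arc_challenge_25",
    split="2023_10_03T17_49_20.721820",
)
print(data)
```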
open-llm-leaderboard/details_PygmalionAI__pygmalion-2-13b
[ "region:us" ]
2023-10-03T16:49:44+00:00
{"pretty_name": "Evaluation run of PygmalionAI/pygmalion-2-13b", "dataset_summary": "Dataset automatically created during the evaluation run of model [PygmalionAI/pygmalion-2-13b](https://huggingface.co/PygmalionAI/pygmalion-2-13b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_PygmalionAI__pygmalion-2-13b\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-10-23T10:43:41.239191](https://huggingface.co/datasets/open-llm-leaderboard/details_PygmalionAI__pygmalion-2-13b/blob/main/results_2023-10-23T10-43-41.239191.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.0016778523489932886,\n \"em_stderr\": 0.0004191330178826867,\n \"f1\": 0.06134752516778516,\n \"f1_stderr\": 0.0013751962272216705,\n \"acc\": 0.4490486622270704,\n \"acc_stderr\": 0.010250799808548879\n },\n \"harness|drop|3\": {\n \"em\": 0.0016778523489932886,\n \"em_stderr\": 0.0004191330178826867,\n \"f1\": 0.06134752516778516,\n \"f1_stderr\": 0.0013751962272216705\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.11751326762699014,\n \"acc_stderr\": 0.008870331256489975\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7805840568271507,\n \"acc_stderr\": 0.01163126836060778\n }\n}\n```", "repo_url": "https://huggingface.co/PygmalionAI/pygmalion-2-13b", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_10_03T17_49_20.721820", "path": ["**/details_harness|arc:challenge|25_2023-10-03T17-49-20.721820.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-10-03T17-49-20.721820.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_10_23T10_43_41.239191", "path": ["**/details_harness|drop|3_2023-10-23T10-43-41.239191.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-10-23T10-43-41.239191.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_10_23T10_43_41.239191", "path": ["**/details_harness|gsm8k|5_2023-10-23T10-43-41.239191.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-10-23T10-43-41.239191.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_10_03T17_49_20.721820", "path": ["**/details_harness|hellaswag|10_2023-10-03T17-49-20.721820.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-10-03T17-49-20.721820.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", 
"data_files": [{"split": "2023_10_03T17_49_20.721820", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-03T17-49-20.721820.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-10-03T17-49-20.721820.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-10-03T17-49-20.721820.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-10-03T17-49-20.721820.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-03T17-49-20.721820.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-10-03T17-49-20.721820.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-10-03T17-49-20.721820.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-10-03T17-49-20.721820.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-10-03T17-49-20.721820.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-10-03T17-49-20.721820.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-10-03T17-49-20.721820.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-10-03T17-49-20.721820.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-03T17-49-20.721820.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-10-03T17-49-20.721820.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-03T17-49-20.721820.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-03T17-49-20.721820.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-10-03T17-49-20.721820.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-10-03T17-49-20.721820.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-10-03T17-49-20.721820.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-03T17-49-20.721820.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-03T17-49-20.721820.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-03T17-49-20.721820.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-10-03T17-49-20.721820.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-03T17-49-20.721820.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-03T17-49-20.721820.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-03T17-49-20.721820.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-03T17-49-20.721820.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-10-03T17-49-20.721820.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-03T17-49-20.721820.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-03T17-49-20.721820.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-03T17-49-20.721820.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-03T17-49-20.721820.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-10-03T17-49-20.721820.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-10-03T17-49-20.721820.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-10-03T17-49-20.721820.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-10-03T17-49-20.721820.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-03T17-49-20.721820.parquet", 
"**/details_harness|hendrycksTest-machine_learning|5_2023-10-03T17-49-20.721820.parquet", "**/details_harness|hendrycksTest-management|5_2023-10-03T17-49-20.721820.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-10-03T17-49-20.721820.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-10-03T17-49-20.721820.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-10-03T17-49-20.721820.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-10-03T17-49-20.721820.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-03T17-49-20.721820.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-10-03T17-49-20.721820.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-10-03T17-49-20.721820.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-10-03T17-49-20.721820.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-10-03T17-49-20.721820.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-10-03T17-49-20.721820.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-10-03T17-49-20.721820.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-10-03T17-49-20.721820.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-10-03T17-49-20.721820.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-10-03T17-49-20.721820.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-10-03T17-49-20.721820.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-03T17-49-20.721820.parquet", "**/details_harness|hendrycksTest-virology|5_2023-10-03T17-49-20.721820.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-10-03T17-49-20.721820.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-03T17-49-20.721820.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-10-03T17-49-20.721820.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-10-03T17-49-20.721820.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-10-03T17-49-20.721820.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-03T17-49-20.721820.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-10-03T17-49-20.721820.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-10-03T17-49-20.721820.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-10-03T17-49-20.721820.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-10-03T17-49-20.721820.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-10-03T17-49-20.721820.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-10-03T17-49-20.721820.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-10-03T17-49-20.721820.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-03T17-49-20.721820.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-10-03T17-49-20.721820.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-03T17-49-20.721820.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-03T17-49-20.721820.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-10-03T17-49-20.721820.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-10-03T17-49-20.721820.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-10-03T17-49-20.721820.parquet", 
"**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-03T17-49-20.721820.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-03T17-49-20.721820.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-03T17-49-20.721820.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-10-03T17-49-20.721820.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-03T17-49-20.721820.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-03T17-49-20.721820.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-03T17-49-20.721820.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-03T17-49-20.721820.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-10-03T17-49-20.721820.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-03T17-49-20.721820.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-03T17-49-20.721820.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-03T17-49-20.721820.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-03T17-49-20.721820.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-10-03T17-49-20.721820.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-10-03T17-49-20.721820.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-10-03T17-49-20.721820.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-10-03T17-49-20.721820.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-03T17-49-20.721820.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-10-03T17-49-20.721820.parquet", "**/details_harness|hendrycksTest-management|5_2023-10-03T17-49-20.721820.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-10-03T17-49-20.721820.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-10-03T17-49-20.721820.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-10-03T17-49-20.721820.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-10-03T17-49-20.721820.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-03T17-49-20.721820.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-10-03T17-49-20.721820.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-10-03T17-49-20.721820.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-10-03T17-49-20.721820.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-10-03T17-49-20.721820.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-10-03T17-49-20.721820.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-10-03T17-49-20.721820.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-10-03T17-49-20.721820.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-10-03T17-49-20.721820.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-10-03T17-49-20.721820.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-10-03T17-49-20.721820.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-03T17-49-20.721820.parquet", "**/details_harness|hendrycksTest-virology|5_2023-10-03T17-49-20.721820.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-10-03T17-49-20.721820.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", 
"data_files": [{"split": "2023_10_03T17_49_20.721820", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-03T17-49-20.721820.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-03T17-49-20.721820.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_10_03T17_49_20.721820", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-10-03T17-49-20.721820.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-10-03T17-49-20.721820.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_10_03T17_49_20.721820", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-10-03T17-49-20.721820.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-10-03T17-49-20.721820.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_10_03T17_49_20.721820", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-10-03T17-49-20.721820.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-10-03T17-49-20.721820.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_10_03T17_49_20.721820", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-03T17-49-20.721820.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-03T17-49-20.721820.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_10_03T17_49_20.721820", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-10-03T17-49-20.721820.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-10-03T17-49-20.721820.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_10_03T17_49_20.721820", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-10-03T17-49-20.721820.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-10-03T17-49-20.721820.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_10_03T17_49_20.721820", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-10-03T17-49-20.721820.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-10-03T17-49-20.721820.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_10_03T17_49_20.721820", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-10-03T17-49-20.721820.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-10-03T17-49-20.721820.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_10_03T17_49_20.721820", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-10-03T17-49-20.721820.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-10-03T17-49-20.721820.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_10_03T17_49_20.721820", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-10-03T17-49-20.721820.parquet"]}, {"split": "latest", 
"path": ["**/details_harness|hendrycksTest-college_physics|5_2023-10-03T17-49-20.721820.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_10_03T17_49_20.721820", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-10-03T17-49-20.721820.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-10-03T17-49-20.721820.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_10_03T17_49_20.721820", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-03T17-49-20.721820.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-03T17-49-20.721820.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_10_03T17_49_20.721820", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-10-03T17-49-20.721820.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-10-03T17-49-20.721820.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_10_03T17_49_20.721820", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-03T17-49-20.721820.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-03T17-49-20.721820.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_10_03T17_49_20.721820", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-03T17-49-20.721820.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-03T17-49-20.721820.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_10_03T17_49_20.721820", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-10-03T17-49-20.721820.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-10-03T17-49-20.721820.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_10_03T17_49_20.721820", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-10-03T17-49-20.721820.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-10-03T17-49-20.721820.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_10_03T17_49_20.721820", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-10-03T17-49-20.721820.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-10-03T17-49-20.721820.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_10_03T17_49_20.721820", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-03T17-49-20.721820.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-03T17-49-20.721820.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_10_03T17_49_20.721820", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-03T17-49-20.721820.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-03T17-49-20.721820.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_10_03T17_49_20.721820", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-03T17-49-20.721820.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-03T17-49-20.721820.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_10_03T17_49_20.721820", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-10-03T17-49-20.721820.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-10-03T17-49-20.721820.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_10_03T17_49_20.721820", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-03T17-49-20.721820.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-03T17-49-20.721820.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_10_03T17_49_20.721820", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-03T17-49-20.721820.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-03T17-49-20.721820.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_10_03T17_49_20.721820", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-03T17-49-20.721820.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-03T17-49-20.721820.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_10_03T17_49_20.721820", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-03T17-49-20.721820.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-03T17-49-20.721820.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_10_03T17_49_20.721820", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-10-03T17-49-20.721820.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-10-03T17-49-20.721820.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_10_03T17_49_20.721820", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-03T17-49-20.721820.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-03T17-49-20.721820.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_10_03T17_49_20.721820", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-03T17-49-20.721820.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-03T17-49-20.721820.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_10_03T17_49_20.721820", "path": 
["**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-03T17-49-20.721820.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-03T17-49-20.721820.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_10_03T17_49_20.721820", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-03T17-49-20.721820.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-03T17-49-20.721820.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_10_03T17_49_20.721820", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-10-03T17-49-20.721820.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-10-03T17-49-20.721820.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_10_03T17_49_20.721820", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-10-03T17-49-20.721820.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-10-03T17-49-20.721820.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_10_03T17_49_20.721820", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-10-03T17-49-20.721820.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-10-03T17-49-20.721820.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_10_03T17_49_20.721820", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-10-03T17-49-20.721820.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-10-03T17-49-20.721820.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_10_03T17_49_20.721820", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-03T17-49-20.721820.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-03T17-49-20.721820.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_10_03T17_49_20.721820", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-10-03T17-49-20.721820.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-10-03T17-49-20.721820.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_10_03T17_49_20.721820", "path": ["**/details_harness|hendrycksTest-management|5_2023-10-03T17-49-20.721820.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-10-03T17-49-20.721820.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_10_03T17_49_20.721820", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-10-03T17-49-20.721820.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-10-03T17-49-20.721820.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_10_03T17_49_20.721820", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-10-03T17-49-20.721820.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-medical_genetics|5_2023-10-03T17-49-20.721820.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_10_03T17_49_20.721820", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-10-03T17-49-20.721820.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-10-03T17-49-20.721820.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_10_03T17_49_20.721820", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-10-03T17-49-20.721820.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-10-03T17-49-20.721820.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_10_03T17_49_20.721820", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-03T17-49-20.721820.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-03T17-49-20.721820.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_10_03T17_49_20.721820", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-10-03T17-49-20.721820.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-10-03T17-49-20.721820.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_10_03T17_49_20.721820", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-10-03T17-49-20.721820.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-10-03T17-49-20.721820.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_10_03T17_49_20.721820", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-10-03T17-49-20.721820.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-10-03T17-49-20.721820.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_10_03T17_49_20.721820", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-10-03T17-49-20.721820.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-10-03T17-49-20.721820.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_10_03T17_49_20.721820", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-10-03T17-49-20.721820.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-10-03T17-49-20.721820.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_10_03T17_49_20.721820", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-10-03T17-49-20.721820.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-10-03T17-49-20.721820.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_10_03T17_49_20.721820", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-10-03T17-49-20.721820.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-10-03T17-49-20.721820.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": 
"2023_10_03T17_49_20.721820", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-10-03T17-49-20.721820.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-10-03T17-49-20.721820.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_10_03T17_49_20.721820", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-10-03T17-49-20.721820.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-10-03T17-49-20.721820.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_10_03T17_49_20.721820", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-10-03T17-49-20.721820.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-10-03T17-49-20.721820.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_10_03T17_49_20.721820", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-03T17-49-20.721820.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-03T17-49-20.721820.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_10_03T17_49_20.721820", "path": ["**/details_harness|hendrycksTest-virology|5_2023-10-03T17-49-20.721820.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-10-03T17-49-20.721820.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_10_03T17_49_20.721820", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-10-03T17-49-20.721820.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-10-03T17-49-20.721820.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_10_03T17_49_20.721820", "path": ["**/details_harness|truthfulqa:mc|0_2023-10-03T17-49-20.721820.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-10-03T17-49-20.721820.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_10_23T10_43_41.239191", "path": ["**/details_harness|winogrande|5_2023-10-23T10-43-41.239191.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-10-23T10-43-41.239191.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_10_03T17_49_20.721820", "path": ["results_2023-10-03T17-49-20.721820.parquet"]}, {"split": "2023_10_23T10_43_41.239191", "path": ["results_2023-10-23T10-43-41.239191.parquet"]}, {"split": "latest", "path": ["results_2023-10-23T10-43-41.239191.parquet"]}]}]}
2023-10-23T09:43:53+00:00
[]
[]
TAGS
#region-us

# Dataset Card for Evaluation run of PygmalionAI/pygmalion-2-13b

## Dataset Description

- Homepage:
- Repository: URL
- Paper:
- Leaderboard: URL
- Point of Contact: clementine@URL

### Dataset Summary

Dataset automatically created during the evaluation run of model PygmalionAI/pygmalion-2-13b on the Open LLM Leaderboard.

The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).

To load the details from a run, you can for instance do the following (see the loading sketch just after this card):

## Latest results

These are the latest results from run 2023-10-23T10:43:41.239191 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks; you can find each one in the results and in the "latest" split for each eval):

### Supported Tasks and Leaderboards

### Languages

## Dataset Structure

### Data Instances

### Data Fields

### Data Splits

## Dataset Creation

### Curation Rationale

### Source Data

#### Initial Data Collection and Normalization

#### Who are the source language producers?

### Annotations

#### Annotation process

#### Who are the annotators?

### Personal and Sensitive Information

## Considerations for Using the Data

### Social Impact of Dataset

### Discussion of Biases

### Other Known Limitations

## Additional Information

### Dataset Curators

### Licensing Information

### Contributions
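A minimal loading sketch for the step referenced above, assuming the standard `datasets` API; the repo id follows the usual `open-llm-leaderboard/details_<org>__<model>` naming and is an assumption here, while the config and split names are taken from this record's metadata:

```python
from datasets import load_dataset

# Load the per-sample details for one evaluated task. The "latest" split
# always points to the most recent run recorded for this model; the
# timestamped splits (e.g. "2023_10_23T10_43_41.239191") pin a single run.
data = load_dataset(
    "open-llm-leaderboard/details_PygmalionAI__pygmalion-2-13b",  # assumed repo id
    "harness_winogrande_5",  # one of the task configurations listed in the metadata
    split="latest",
)
print(data[0])
```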
[ "# Dataset Card for Evaluation run of PygmalionAI/pygmalion-2-13b", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model PygmalionAI/pygmalion-2-13b on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-23T10:43:41.239191(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of PygmalionAI/pygmalion-2-13b", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model PygmalionAI/pygmalion-2-13b on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-23T10:43:41.239191(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 22, 31, 170, 67, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of PygmalionAI/pygmalion-2-13b## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model PygmalionAI/pygmalion-2-13b on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-10-23T10:43:41.239191(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
4337cfc89e7c4ac6e7dc820a161f3c4ee993ed54
# Dataset Card for Evaluation run of Dampish/StellarX-4B-V0

## Dataset Description

- **Homepage:**
- **Repository:** https://huggingface.co/Dampish/StellarX-4B-V0
- **Paper:**
- **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
- **Point of Contact:** [email protected]

### Dataset Summary

Dataset automatically created during the evaluation run of model [Dampish/StellarX-4B-V0](https://huggingface.co/Dampish/StellarX-4B-V0) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).

The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).

To load the details from a run, you can for instance do the following:

```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_Dampish__StellarX-4B-V0",
	"harness_winogrande_5",
	split="train")
```

## Latest results

These are the [latest results from run 2023-10-25T05:01:15.065069](https://huggingface.co/datasets/open-llm-leaderboard/details_Dampish__StellarX-4B-V0/blob/main/results_2023-10-25T05-01-15.065069.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks; you can find each one in the results and in the "latest" split for each eval):

```python
{
    "all": {
        "em": 0.0269505033557047,
        "em_stderr": 0.0016584048452624597,
        "f1": 0.1094840604026844,
        "f1_stderr": 0.002280673263690395,
        "acc": 0.31925808997632205,
        "acc_stderr": 0.006751239835395647
    },
    "harness|drop|3": {
        "em": 0.0269505033557047,
        "em_stderr": 0.0016584048452624597,
        "f1": 0.1094840604026844,
        "f1_stderr": 0.002280673263690395
    },
    "harness|gsm8k|5": {
        "acc": 0.0,
        "acc_stderr": 0.0
    },
    "harness|winogrande|5": {
        "acc": 0.6385161799526441,
        "acc_stderr": 0.013502479670791294
    }
}
```

### Supported Tasks and Leaderboards

[More Information Needed]

### Languages

[More Information Needed]

## Dataset Structure

### Data Instances

[More Information Needed]

### Data Fields

[More Information Needed]

### Data Splits

[More Information Needed]

## Dataset Creation

### Curation Rationale

[More Information Needed]

### Source Data

#### Initial Data Collection and Normalization

[More Information Needed]

#### Who are the source language producers?

[More Information Needed]

### Annotations

#### Annotation process

[More Information Needed]

#### Who are the annotators?

[More Information Needed]

### Personal and Sensitive Information

[More Information Needed]

## Considerations for Using the Data

### Social Impact of Dataset

[More Information Needed]

### Discussion of Biases

[More Information Needed]

### Other Known Limitations

[More Information Needed]

## Additional Information

### Dataset Curators

[More Information Needed]

### Licensing Information

[More Information Needed]

### Citation Information

[More Information Needed]

### Contributions

[More Information Needed]
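As a follow-up to the loading snippet above, a minimal sketch for inspecting the aggregated metrics; it assumes only the standard `datasets` API plus the "results" configuration and "latest" split listed in this card's configs:

```python
from datasets import get_dataset_config_names, load_dataset

REPO = "open-llm-leaderboard/details_Dampish__StellarX-4B-V0"

# List every available configuration (the per-task configs plus "results").
print(get_dataset_config_names(REPO))

# "results" aggregates the runs; "latest" points to the most recent one
# (2023-10-25T05:01:15.065069 for this card).
results = load_dataset(REPO, "results", split="latest")

# Inspect the aggregated metrics as a pandas DataFrame.
print(results.to_pandas().head())
```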
open-llm-leaderboard/details_Dampish__StellarX-4B-V0
[ "region:us" ]
2023-10-03T16:57:21+00:00
{"pretty_name": "Evaluation run of Dampish/StellarX-4B-V0", "dataset_summary": "Dataset automatically created during the evaluation run of model [Dampish/StellarX-4B-V0](https://huggingface.co/Dampish/StellarX-4B-V0) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_Dampish__StellarX-4B-V0\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-10-25T05:01:15.065069](https://huggingface.co/datasets/open-llm-leaderboard/details_Dampish__StellarX-4B-V0/blob/main/results_2023-10-25T05-01-15.065069.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.0269505033557047,\n \"em_stderr\": 0.0016584048452624597,\n \"f1\": 0.1094840604026844,\n \"f1_stderr\": 0.002280673263690395,\n \"acc\": 0.31925808997632205,\n \"acc_stderr\": 0.006751239835395647\n },\n \"harness|drop|3\": {\n \"em\": 0.0269505033557047,\n \"em_stderr\": 0.0016584048452624597,\n \"f1\": 0.1094840604026844,\n \"f1_stderr\": 0.002280673263690395\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.0,\n \"acc_stderr\": 0.0\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.6385161799526441,\n \"acc_stderr\": 0.013502479670791294\n }\n}\n```", "repo_url": "https://huggingface.co/Dampish/StellarX-4B-V0", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_10_03T17_57_03.227360", "path": ["**/details_harness|arc:challenge|25_2023-10-03T17-57-03.227360.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-10-03T17-57-03.227360.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_10_25T05_01_15.065069", "path": ["**/details_harness|drop|3_2023-10-25T05-01-15.065069.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-10-25T05-01-15.065069.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_10_25T05_01_15.065069", "path": ["**/details_harness|gsm8k|5_2023-10-25T05-01-15.065069.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-10-25T05-01-15.065069.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_10_03T17_57_03.227360", "path": ["**/details_harness|hellaswag|10_2023-10-03T17-57-03.227360.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-10-03T17-57-03.227360.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_10_03T17_57_03.227360", "path": 
["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-03T17-57-03.227360.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-10-03T17-57-03.227360.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-10-03T17-57-03.227360.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-10-03T17-57-03.227360.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-03T17-57-03.227360.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-10-03T17-57-03.227360.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-10-03T17-57-03.227360.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-10-03T17-57-03.227360.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-10-03T17-57-03.227360.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-10-03T17-57-03.227360.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-10-03T17-57-03.227360.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-10-03T17-57-03.227360.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-03T17-57-03.227360.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-10-03T17-57-03.227360.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-03T17-57-03.227360.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-03T17-57-03.227360.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-10-03T17-57-03.227360.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-10-03T17-57-03.227360.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-10-03T17-57-03.227360.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-03T17-57-03.227360.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-03T17-57-03.227360.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-03T17-57-03.227360.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-10-03T17-57-03.227360.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-03T17-57-03.227360.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-03T17-57-03.227360.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-03T17-57-03.227360.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-03T17-57-03.227360.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-10-03T17-57-03.227360.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-03T17-57-03.227360.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-03T17-57-03.227360.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-03T17-57-03.227360.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-03T17-57-03.227360.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-10-03T17-57-03.227360.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-10-03T17-57-03.227360.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-10-03T17-57-03.227360.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-10-03T17-57-03.227360.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-03T17-57-03.227360.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-10-03T17-57-03.227360.parquet", 
"**/details_harness|hendrycksTest-management|5_2023-10-03T17-57-03.227360.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-10-03T17-57-03.227360.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-10-03T17-57-03.227360.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-10-03T17-57-03.227360.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-10-03T17-57-03.227360.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-03T17-57-03.227360.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-10-03T17-57-03.227360.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-10-03T17-57-03.227360.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-10-03T17-57-03.227360.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-10-03T17-57-03.227360.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-10-03T17-57-03.227360.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-10-03T17-57-03.227360.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-10-03T17-57-03.227360.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-10-03T17-57-03.227360.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-10-03T17-57-03.227360.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-10-03T17-57-03.227360.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-03T17-57-03.227360.parquet", "**/details_harness|hendrycksTest-virology|5_2023-10-03T17-57-03.227360.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-10-03T17-57-03.227360.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-03T17-57-03.227360.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-10-03T17-57-03.227360.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-10-03T17-57-03.227360.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-10-03T17-57-03.227360.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-03T17-57-03.227360.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-10-03T17-57-03.227360.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-10-03T17-57-03.227360.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-10-03T17-57-03.227360.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-10-03T17-57-03.227360.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-10-03T17-57-03.227360.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-10-03T17-57-03.227360.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-10-03T17-57-03.227360.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-03T17-57-03.227360.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-10-03T17-57-03.227360.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-03T17-57-03.227360.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-03T17-57-03.227360.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-10-03T17-57-03.227360.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-10-03T17-57-03.227360.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-10-03T17-57-03.227360.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-03T17-57-03.227360.parquet", 
"**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-03T17-57-03.227360.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-03T17-57-03.227360.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-10-03T17-57-03.227360.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-03T17-57-03.227360.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-03T17-57-03.227360.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-03T17-57-03.227360.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-03T17-57-03.227360.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-10-03T17-57-03.227360.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-03T17-57-03.227360.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-03T17-57-03.227360.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-03T17-57-03.227360.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-03T17-57-03.227360.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-10-03T17-57-03.227360.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-10-03T17-57-03.227360.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-10-03T17-57-03.227360.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-10-03T17-57-03.227360.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-03T17-57-03.227360.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-10-03T17-57-03.227360.parquet", "**/details_harness|hendrycksTest-management|5_2023-10-03T17-57-03.227360.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-10-03T17-57-03.227360.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-10-03T17-57-03.227360.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-10-03T17-57-03.227360.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-10-03T17-57-03.227360.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-03T17-57-03.227360.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-10-03T17-57-03.227360.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-10-03T17-57-03.227360.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-10-03T17-57-03.227360.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-10-03T17-57-03.227360.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-10-03T17-57-03.227360.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-10-03T17-57-03.227360.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-10-03T17-57-03.227360.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-10-03T17-57-03.227360.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-10-03T17-57-03.227360.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-10-03T17-57-03.227360.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-03T17-57-03.227360.parquet", "**/details_harness|hendrycksTest-virology|5_2023-10-03T17-57-03.227360.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-10-03T17-57-03.227360.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_10_03T17_57_03.227360", "path": 
["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-03T17-57-03.227360.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-03T17-57-03.227360.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_10_03T17_57_03.227360", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-10-03T17-57-03.227360.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-10-03T17-57-03.227360.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_10_03T17_57_03.227360", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-10-03T17-57-03.227360.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-10-03T17-57-03.227360.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_10_03T17_57_03.227360", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-10-03T17-57-03.227360.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-10-03T17-57-03.227360.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_10_03T17_57_03.227360", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-03T17-57-03.227360.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-03T17-57-03.227360.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_10_03T17_57_03.227360", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-10-03T17-57-03.227360.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-10-03T17-57-03.227360.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_10_03T17_57_03.227360", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-10-03T17-57-03.227360.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-10-03T17-57-03.227360.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_10_03T17_57_03.227360", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-10-03T17-57-03.227360.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-10-03T17-57-03.227360.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_10_03T17_57_03.227360", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-10-03T17-57-03.227360.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-10-03T17-57-03.227360.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_10_03T17_57_03.227360", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-10-03T17-57-03.227360.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-10-03T17-57-03.227360.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_10_03T17_57_03.227360", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-10-03T17-57-03.227360.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-college_physics|5_2023-10-03T17-57-03.227360.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_10_03T17_57_03.227360", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-10-03T17-57-03.227360.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-10-03T17-57-03.227360.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_10_03T17_57_03.227360", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-03T17-57-03.227360.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-03T17-57-03.227360.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_10_03T17_57_03.227360", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-10-03T17-57-03.227360.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-10-03T17-57-03.227360.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_10_03T17_57_03.227360", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-03T17-57-03.227360.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-03T17-57-03.227360.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_10_03T17_57_03.227360", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-03T17-57-03.227360.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-03T17-57-03.227360.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_10_03T17_57_03.227360", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-10-03T17-57-03.227360.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-10-03T17-57-03.227360.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_10_03T17_57_03.227360", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-10-03T17-57-03.227360.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-10-03T17-57-03.227360.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_10_03T17_57_03.227360", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-10-03T17-57-03.227360.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-10-03T17-57-03.227360.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_10_03T17_57_03.227360", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-03T17-57-03.227360.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-03T17-57-03.227360.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_10_03T17_57_03.227360", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-03T17-57-03.227360.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-03T17-57-03.227360.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_10_03T17_57_03.227360", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-03T17-57-03.227360.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-03T17-57-03.227360.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_10_03T17_57_03.227360", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-10-03T17-57-03.227360.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-10-03T17-57-03.227360.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_10_03T17_57_03.227360", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-03T17-57-03.227360.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-03T17-57-03.227360.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_10_03T17_57_03.227360", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-03T17-57-03.227360.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-03T17-57-03.227360.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_10_03T17_57_03.227360", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-03T17-57-03.227360.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-03T17-57-03.227360.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_10_03T17_57_03.227360", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-03T17-57-03.227360.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-03T17-57-03.227360.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_10_03T17_57_03.227360", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-10-03T17-57-03.227360.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-10-03T17-57-03.227360.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_10_03T17_57_03.227360", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-03T17-57-03.227360.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-03T17-57-03.227360.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_10_03T17_57_03.227360", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-03T17-57-03.227360.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-03T17-57-03.227360.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_10_03T17_57_03.227360", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-03T17-57-03.227360.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-03T17-57-03.227360.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_10_03T17_57_03.227360", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-03T17-57-03.227360.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-03T17-57-03.227360.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_10_03T17_57_03.227360", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-10-03T17-57-03.227360.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-10-03T17-57-03.227360.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_10_03T17_57_03.227360", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-10-03T17-57-03.227360.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-10-03T17-57-03.227360.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_10_03T17_57_03.227360", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-10-03T17-57-03.227360.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-10-03T17-57-03.227360.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_10_03T17_57_03.227360", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-10-03T17-57-03.227360.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-10-03T17-57-03.227360.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_10_03T17_57_03.227360", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-03T17-57-03.227360.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-03T17-57-03.227360.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_10_03T17_57_03.227360", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-10-03T17-57-03.227360.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-10-03T17-57-03.227360.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_10_03T17_57_03.227360", "path": ["**/details_harness|hendrycksTest-management|5_2023-10-03T17-57-03.227360.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-10-03T17-57-03.227360.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_10_03T17_57_03.227360", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-10-03T17-57-03.227360.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-10-03T17-57-03.227360.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_10_03T17_57_03.227360", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-10-03T17-57-03.227360.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-10-03T17-57-03.227360.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": 
"2023_10_03T17_57_03.227360", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-10-03T17-57-03.227360.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-10-03T17-57-03.227360.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_10_03T17_57_03.227360", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-10-03T17-57-03.227360.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-10-03T17-57-03.227360.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_10_03T17_57_03.227360", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-03T17-57-03.227360.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-03T17-57-03.227360.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_10_03T17_57_03.227360", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-10-03T17-57-03.227360.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-10-03T17-57-03.227360.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_10_03T17_57_03.227360", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-10-03T17-57-03.227360.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-10-03T17-57-03.227360.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_10_03T17_57_03.227360", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-10-03T17-57-03.227360.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-10-03T17-57-03.227360.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_10_03T17_57_03.227360", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-10-03T17-57-03.227360.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-10-03T17-57-03.227360.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_10_03T17_57_03.227360", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-10-03T17-57-03.227360.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-10-03T17-57-03.227360.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_10_03T17_57_03.227360", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-10-03T17-57-03.227360.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-10-03T17-57-03.227360.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_10_03T17_57_03.227360", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-10-03T17-57-03.227360.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-10-03T17-57-03.227360.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_10_03T17_57_03.227360", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-10-03T17-57-03.227360.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-public_relations|5_2023-10-03T17-57-03.227360.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_10_03T17_57_03.227360", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-10-03T17-57-03.227360.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-10-03T17-57-03.227360.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_10_03T17_57_03.227360", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-10-03T17-57-03.227360.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-10-03T17-57-03.227360.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_10_03T17_57_03.227360", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-03T17-57-03.227360.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-03T17-57-03.227360.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_10_03T17_57_03.227360", "path": ["**/details_harness|hendrycksTest-virology|5_2023-10-03T17-57-03.227360.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-10-03T17-57-03.227360.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_10_03T17_57_03.227360", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-10-03T17-57-03.227360.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-10-03T17-57-03.227360.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_10_03T17_57_03.227360", "path": ["**/details_harness|truthfulqa:mc|0_2023-10-03T17-57-03.227360.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-10-03T17-57-03.227360.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_10_25T05_01_15.065069", "path": ["**/details_harness|winogrande|5_2023-10-25T05-01-15.065069.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-10-25T05-01-15.065069.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_10_03T17_57_03.227360", "path": ["results_2023-10-03T17-57-03.227360.parquet"]}, {"split": "2023_10_25T05_01_15.065069", "path": ["results_2023-10-25T05-01-15.065069.parquet"]}, {"split": "latest", "path": ["results_2023-10-25T05-01-15.065069.parquet"]}]}]}
2023-10-25T04:01:27+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of Dampish/StellarX-4B-V0 ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model Dampish/StellarX-4B-V0 on the Open LLM Leaderboard. The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-10-25T05:01:15.065069 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
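The loader snippet referenced by "do the following:" above was dropped when this card text was flattened. Below is a minimal sketch of that call, following the pattern the sibling card later in this file uses; the repository id `open-llm-leaderboard/details_Dampish__StellarX-4B-V0` is inferred from the leaderboard's naming convention and should be treated as an assumption.

```python
# Minimal sketch -- the repo id is inferred from the leaderboard naming
# convention (details_<org>__<model>), not stated verbatim in this card.
from datasets import load_dataset

data = load_dataset(
    "open-llm-leaderboard/details_Dampish__StellarX-4B-V0",  # assumed repo id
    "harness_winogrande_5",  # one of the 64 task configurations listed above
    split="train",           # per the card, "train" tracks the latest results
)
```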
[ "# Dataset Card for Evaluation run of Dampish/StellarX-4B-V0", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model Dampish/StellarX-4B-V0 on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-25T05:01:15.065069(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of Dampish/StellarX-4B-V0", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model Dampish/StellarX-4B-V0 on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-25T05:01:15.065069(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 21, 31, 169, 67, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of Dampish/StellarX-4B-V0## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model Dampish/StellarX-4B-V0 on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-10-25T05:01:15.065069(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
ae85b3fbd59d3d20be934a6c4176dc3e243168d2
# Dataset Card for Evaluation run of AtAndDev/ShortKingv0.1 ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/AtAndDev/ShortKingv0.1 - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [AtAndDev/ShortKingv0.1](https://huggingface.co/AtAndDev/ShortKingv0.1) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_AtAndDev__ShortKingv0.1", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-10-24T00:55:05.543102](https://huggingface.co/datasets/open-llm-leaderboard/details_AtAndDev__ShortKingv0.1/blob/main/results_2023-10-24T00-55-05.543102.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "em": 0.0050335570469798654, "em_stderr": 0.0007247385547751907, "f1": 0.054680159395973316, "f1_stderr": 0.0014128539394208607, "acc": 0.28246387417700025, "acc_stderr": 0.007901602410009655 }, "harness|drop|3": { "em": 0.0050335570469798654, "em_stderr": 0.0007247385547751907, "f1": 0.054680159395973316, "f1_stderr": 0.0014128539394208607 }, "harness|gsm8k|5": { "acc": 0.004548900682335102, "acc_stderr": 0.0018535550440036204 }, "harness|winogrande|5": { "acc": 0.5603788476716653, "acc_stderr": 0.01394964977601569 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
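The example above loads a single task configuration; the aggregated scores described in the summary live in the "results" configuration. Below is a minimal sketch of fetching them, assuming the "results" config is accessed the same way as the task configs (the config list further down does define a "latest" split for it).

```python
# Sketch: pull the aggregated metrics instead of per-task details.
# Assumes the "results" configuration follows the same access pattern
# as the task configs shown in this card.
from datasets import load_dataset

results = load_dataset(
    "open-llm-leaderboard/details_AtAndDev__ShortKingv0.1",
    "results",
    split="latest",  # per the config list, "latest" tracks the newest run
)
print(results[0])  # a single row holding the aggregated scores
```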
open-llm-leaderboard/details_AtAndDev__ShortKingv0.1
[ "region:us" ]
2023-10-03T16:59:56+00:00
{"pretty_name": "Evaluation run of AtAndDev/ShortKingv0.1", "dataset_summary": "Dataset automatically created during the evaluation run of model [AtAndDev/ShortKingv0.1](https://huggingface.co/AtAndDev/ShortKingv0.1) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_AtAndDev__ShortKingv0.1\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-10-24T00:55:05.543102](https://huggingface.co/datasets/open-llm-leaderboard/details_AtAndDev__ShortKingv0.1/blob/main/results_2023-10-24T00-55-05.543102.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.0050335570469798654,\n \"em_stderr\": 0.0007247385547751907,\n \"f1\": 0.054680159395973316,\n \"f1_stderr\": 0.0014128539394208607,\n \"acc\": 0.28246387417700025,\n \"acc_stderr\": 0.007901602410009655\n },\n \"harness|drop|3\": {\n \"em\": 0.0050335570469798654,\n \"em_stderr\": 0.0007247385547751907,\n \"f1\": 0.054680159395973316,\n \"f1_stderr\": 0.0014128539394208607\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.004548900682335102,\n \"acc_stderr\": 0.0018535550440036204\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.5603788476716653,\n \"acc_stderr\": 0.01394964977601569\n }\n}\n```", "repo_url": "https://huggingface.co/AtAndDev/ShortKingv0.1", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_10_03T17_59_37.972814", "path": ["**/details_harness|arc:challenge|25_2023-10-03T17-59-37.972814.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-10-03T17-59-37.972814.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_10_24T00_55_05.543102", "path": ["**/details_harness|drop|3_2023-10-24T00-55-05.543102.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-10-24T00-55-05.543102.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_10_24T00_55_05.543102", "path": ["**/details_harness|gsm8k|5_2023-10-24T00-55-05.543102.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-10-24T00-55-05.543102.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_10_03T17_59_37.972814", "path": ["**/details_harness|hellaswag|10_2023-10-03T17-59-37.972814.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-10-03T17-59-37.972814.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": 
"2023_10_03T17_59_37.972814", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-03T17-59-37.972814.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-10-03T17-59-37.972814.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-10-03T17-59-37.972814.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-10-03T17-59-37.972814.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-03T17-59-37.972814.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-10-03T17-59-37.972814.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-10-03T17-59-37.972814.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-10-03T17-59-37.972814.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-10-03T17-59-37.972814.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-10-03T17-59-37.972814.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-10-03T17-59-37.972814.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-10-03T17-59-37.972814.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-03T17-59-37.972814.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-10-03T17-59-37.972814.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-03T17-59-37.972814.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-03T17-59-37.972814.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-10-03T17-59-37.972814.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-10-03T17-59-37.972814.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-10-03T17-59-37.972814.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-03T17-59-37.972814.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-03T17-59-37.972814.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-03T17-59-37.972814.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-10-03T17-59-37.972814.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-03T17-59-37.972814.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-03T17-59-37.972814.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-03T17-59-37.972814.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-03T17-59-37.972814.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-10-03T17-59-37.972814.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-03T17-59-37.972814.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-03T17-59-37.972814.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-03T17-59-37.972814.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-03T17-59-37.972814.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-10-03T17-59-37.972814.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-10-03T17-59-37.972814.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-10-03T17-59-37.972814.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-10-03T17-59-37.972814.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-03T17-59-37.972814.parquet", 
"**/details_harness|hendrycksTest-machine_learning|5_2023-10-03T17-59-37.972814.parquet", "**/details_harness|hendrycksTest-management|5_2023-10-03T17-59-37.972814.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-10-03T17-59-37.972814.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-10-03T17-59-37.972814.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-10-03T17-59-37.972814.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-10-03T17-59-37.972814.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-03T17-59-37.972814.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-10-03T17-59-37.972814.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-10-03T17-59-37.972814.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-10-03T17-59-37.972814.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-10-03T17-59-37.972814.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-10-03T17-59-37.972814.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-10-03T17-59-37.972814.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-10-03T17-59-37.972814.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-10-03T17-59-37.972814.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-10-03T17-59-37.972814.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-10-03T17-59-37.972814.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-03T17-59-37.972814.parquet", "**/details_harness|hendrycksTest-virology|5_2023-10-03T17-59-37.972814.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-10-03T17-59-37.972814.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-03T17-59-37.972814.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-10-03T17-59-37.972814.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-10-03T17-59-37.972814.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-10-03T17-59-37.972814.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-03T17-59-37.972814.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-10-03T17-59-37.972814.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-10-03T17-59-37.972814.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-10-03T17-59-37.972814.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-10-03T17-59-37.972814.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-10-03T17-59-37.972814.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-10-03T17-59-37.972814.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-10-03T17-59-37.972814.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-03T17-59-37.972814.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-10-03T17-59-37.972814.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-03T17-59-37.972814.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-03T17-59-37.972814.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-10-03T17-59-37.972814.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-10-03T17-59-37.972814.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-10-03T17-59-37.972814.parquet", 
"**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-03T17-59-37.972814.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-03T17-59-37.972814.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-03T17-59-37.972814.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-10-03T17-59-37.972814.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-03T17-59-37.972814.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-03T17-59-37.972814.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-03T17-59-37.972814.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-03T17-59-37.972814.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-10-03T17-59-37.972814.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-03T17-59-37.972814.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-03T17-59-37.972814.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-03T17-59-37.972814.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-03T17-59-37.972814.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-10-03T17-59-37.972814.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-10-03T17-59-37.972814.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-10-03T17-59-37.972814.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-10-03T17-59-37.972814.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-03T17-59-37.972814.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-10-03T17-59-37.972814.parquet", "**/details_harness|hendrycksTest-management|5_2023-10-03T17-59-37.972814.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-10-03T17-59-37.972814.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-10-03T17-59-37.972814.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-10-03T17-59-37.972814.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-10-03T17-59-37.972814.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-03T17-59-37.972814.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-10-03T17-59-37.972814.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-10-03T17-59-37.972814.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-10-03T17-59-37.972814.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-10-03T17-59-37.972814.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-10-03T17-59-37.972814.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-10-03T17-59-37.972814.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-10-03T17-59-37.972814.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-10-03T17-59-37.972814.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-10-03T17-59-37.972814.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-10-03T17-59-37.972814.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-03T17-59-37.972814.parquet", "**/details_harness|hendrycksTest-virology|5_2023-10-03T17-59-37.972814.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-10-03T17-59-37.972814.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", 
"data_files": [{"split": "2023_10_03T17_59_37.972814", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-03T17-59-37.972814.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-03T17-59-37.972814.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_10_03T17_59_37.972814", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-10-03T17-59-37.972814.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-10-03T17-59-37.972814.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_10_03T17_59_37.972814", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-10-03T17-59-37.972814.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-10-03T17-59-37.972814.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_10_03T17_59_37.972814", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-10-03T17-59-37.972814.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-10-03T17-59-37.972814.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_10_03T17_59_37.972814", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-03T17-59-37.972814.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-03T17-59-37.972814.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_10_03T17_59_37.972814", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-10-03T17-59-37.972814.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-10-03T17-59-37.972814.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_10_03T17_59_37.972814", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-10-03T17-59-37.972814.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-10-03T17-59-37.972814.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_10_03T17_59_37.972814", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-10-03T17-59-37.972814.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-10-03T17-59-37.972814.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_10_03T17_59_37.972814", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-10-03T17-59-37.972814.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-10-03T17-59-37.972814.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_10_03T17_59_37.972814", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-10-03T17-59-37.972814.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-10-03T17-59-37.972814.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_10_03T17_59_37.972814", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-10-03T17-59-37.972814.parquet"]}, {"split": "latest", 
"path": ["**/details_harness|hendrycksTest-college_physics|5_2023-10-03T17-59-37.972814.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_10_03T17_59_37.972814", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-10-03T17-59-37.972814.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-10-03T17-59-37.972814.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_10_03T17_59_37.972814", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-03T17-59-37.972814.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-03T17-59-37.972814.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_10_03T17_59_37.972814", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-10-03T17-59-37.972814.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-10-03T17-59-37.972814.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_10_03T17_59_37.972814", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-03T17-59-37.972814.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-03T17-59-37.972814.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_10_03T17_59_37.972814", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-03T17-59-37.972814.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-03T17-59-37.972814.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_10_03T17_59_37.972814", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-10-03T17-59-37.972814.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-10-03T17-59-37.972814.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_10_03T17_59_37.972814", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-10-03T17-59-37.972814.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-10-03T17-59-37.972814.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_10_03T17_59_37.972814", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-10-03T17-59-37.972814.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-10-03T17-59-37.972814.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_10_03T17_59_37.972814", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-03T17-59-37.972814.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-03T17-59-37.972814.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_10_03T17_59_37.972814", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-03T17-59-37.972814.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-03T17-59-37.972814.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_10_03T17_59_37.972814", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-03T17-59-37.972814.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-03T17-59-37.972814.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_10_03T17_59_37.972814", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-10-03T17-59-37.972814.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-10-03T17-59-37.972814.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_10_03T17_59_37.972814", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-03T17-59-37.972814.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-03T17-59-37.972814.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_10_03T17_59_37.972814", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-03T17-59-37.972814.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-03T17-59-37.972814.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_10_03T17_59_37.972814", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-03T17-59-37.972814.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-03T17-59-37.972814.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_10_03T17_59_37.972814", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-03T17-59-37.972814.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-03T17-59-37.972814.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_10_03T17_59_37.972814", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-10-03T17-59-37.972814.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-10-03T17-59-37.972814.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_10_03T17_59_37.972814", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-03T17-59-37.972814.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-03T17-59-37.972814.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_10_03T17_59_37.972814", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-03T17-59-37.972814.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-03T17-59-37.972814.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_10_03T17_59_37.972814", "path": 
["**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-03T17-59-37.972814.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-03T17-59-37.972814.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_10_03T17_59_37.972814", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-03T17-59-37.972814.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-03T17-59-37.972814.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_10_03T17_59_37.972814", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-10-03T17-59-37.972814.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-10-03T17-59-37.972814.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_10_03T17_59_37.972814", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-10-03T17-59-37.972814.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-10-03T17-59-37.972814.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_10_03T17_59_37.972814", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-10-03T17-59-37.972814.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-10-03T17-59-37.972814.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_10_03T17_59_37.972814", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-10-03T17-59-37.972814.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-10-03T17-59-37.972814.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_10_03T17_59_37.972814", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-03T17-59-37.972814.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-03T17-59-37.972814.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_10_03T17_59_37.972814", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-10-03T17-59-37.972814.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-10-03T17-59-37.972814.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_10_03T17_59_37.972814", "path": ["**/details_harness|hendrycksTest-management|5_2023-10-03T17-59-37.972814.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-10-03T17-59-37.972814.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_10_03T17_59_37.972814", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-10-03T17-59-37.972814.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-10-03T17-59-37.972814.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_10_03T17_59_37.972814", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-10-03T17-59-37.972814.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-medical_genetics|5_2023-10-03T17-59-37.972814.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_10_03T17_59_37.972814", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-10-03T17-59-37.972814.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-10-03T17-59-37.972814.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_10_03T17_59_37.972814", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-10-03T17-59-37.972814.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-10-03T17-59-37.972814.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_10_03T17_59_37.972814", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-03T17-59-37.972814.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-03T17-59-37.972814.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_10_03T17_59_37.972814", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-10-03T17-59-37.972814.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-10-03T17-59-37.972814.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_10_03T17_59_37.972814", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-10-03T17-59-37.972814.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-10-03T17-59-37.972814.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_10_03T17_59_37.972814", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-10-03T17-59-37.972814.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-10-03T17-59-37.972814.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_10_03T17_59_37.972814", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-10-03T17-59-37.972814.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-10-03T17-59-37.972814.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_10_03T17_59_37.972814", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-10-03T17-59-37.972814.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-10-03T17-59-37.972814.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_10_03T17_59_37.972814", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-10-03T17-59-37.972814.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-10-03T17-59-37.972814.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_10_03T17_59_37.972814", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-10-03T17-59-37.972814.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-10-03T17-59-37.972814.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": 
"2023_10_03T17_59_37.972814", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-10-03T17-59-37.972814.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-10-03T17-59-37.972814.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_10_03T17_59_37.972814", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-10-03T17-59-37.972814.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-10-03T17-59-37.972814.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_10_03T17_59_37.972814", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-10-03T17-59-37.972814.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-10-03T17-59-37.972814.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_10_03T17_59_37.972814", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-03T17-59-37.972814.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-03T17-59-37.972814.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_10_03T17_59_37.972814", "path": ["**/details_harness|hendrycksTest-virology|5_2023-10-03T17-59-37.972814.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-10-03T17-59-37.972814.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_10_03T17_59_37.972814", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-10-03T17-59-37.972814.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-10-03T17-59-37.972814.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_10_03T17_59_37.972814", "path": ["**/details_harness|truthfulqa:mc|0_2023-10-03T17-59-37.972814.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-10-03T17-59-37.972814.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_10_24T00_55_05.543102", "path": ["**/details_harness|winogrande|5_2023-10-24T00-55-05.543102.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-10-24T00-55-05.543102.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_10_03T17_59_37.972814", "path": ["results_2023-10-03T17-59-37.972814.parquet"]}, {"split": "2023_10_24T00_55_05.543102", "path": ["results_2023-10-24T00-55-05.543102.parquet"]}, {"split": "latest", "path": ["results_2023-10-24T00-55-05.543102.parquet"]}]}]}
2023-10-23T23:55:17+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of AtAndDev/ShortKingv0.1 ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model AtAndDev/ShortKingv0.1 on the Open LLM Leaderboard. The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-10-24T00:55:05.543102 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
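Here too, the code block that followed "do the following:" was dropped in flattening; the full card text earlier in this record gives the exact call, reproduced below as a sketch.

```python
# Loader call as given in the full card text for this record.
from datasets import load_dataset

data = load_dataset(
    "open-llm-leaderboard/details_AtAndDev__ShortKingv0.1",
    "harness_winogrande_5",
    split="train",
)
```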
[ "# Dataset Card for Evaluation run of AtAndDev/ShortKingv0.1", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model AtAndDev/ShortKingv0.1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-24T00:55:05.543102(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of AtAndDev/ShortKingv0.1", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model AtAndDev/ShortKingv0.1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-24T00:55:05.543102(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 19, 31, 167, 66, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of AtAndDev/ShortKingv0.1## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model AtAndDev/ShortKingv0.1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-10-24T00:55:05.543102(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
63edd23166d1b7d17781b238654f4656244370f4
# Dataset Card for Evaluation run of BramVanroy/Llama-2-13b-chat-dutch

## Dataset Description

- **Homepage:** 
- **Repository:** https://huggingface.co/BramVanroy/Llama-2-13b-chat-dutch
- **Paper:** 
- **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
- **Point of Contact:** [email protected]

### Dataset Summary

Dataset automatically created during the evaluation run of model [BramVanroy/Llama-2-13b-chat-dutch](https://huggingface.co/BramVanroy/Llama-2-13b-chat-dutch) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).

The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).

To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_BramVanroy__Llama-2-13b-chat-dutch",
	"harness_winogrande_5",
	split="train")
```

## Latest results

These are the [latest results from run 2023-10-24T16:53:00.153691](https://huggingface.co/datasets/open-llm-leaderboard/details_BramVanroy__Llama-2-13b-chat-dutch/blob/main/results_2023-10-24T16-53-00.153691.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks; you can find each in the results and the "latest" split for each eval):

```python
{
    "all": {
        "em": 0.0017827181208053692,
        "em_stderr": 0.00043200973460387693,
        "f1": 0.0627757969798656,
        "f1_stderr": 0.001386827392539851,
        "acc": 0.4366382175872875,
        "acc_stderr": 0.010201588521352003
    },
    "harness|drop|3": {
        "em": 0.0017827181208053692,
        "em_stderr": 0.00043200973460387693,
        "f1": 0.0627757969798656,
        "f1_stderr": 0.001386827392539851
    },
    "harness|gsm8k|5": {
        "acc": 0.1068991660348749,
        "acc_stderr": 0.008510982565520481
    },
    "harness|winogrande|5": {
        "acc": 0.7663772691397001,
        "acc_stderr": 0.011892194477183524
    }
}
```

### Supported Tasks and Leaderboards

[More Information Needed]

### Languages

[More Information Needed]

## Dataset Structure

### Data Instances

[More Information Needed]

### Data Fields

[More Information Needed]

### Data Splits

[More Information Needed]

## Dataset Creation

### Curation Rationale

[More Information Needed]

### Source Data

#### Initial Data Collection and Normalization

[More Information Needed]

#### Who are the source language producers?

[More Information Needed]

### Annotations

#### Annotation process

[More Information Needed]

#### Who are the annotators?

[More Information Needed]

### Personal and Sensitive Information

[More Information Needed]

## Considerations for Using the Data

### Social Impact of Dataset

[More Information Needed]

### Discussion of Biases

[More Information Needed]

### Other Known Limitations

[More Information Needed]

## Additional Information

### Dataset Curators

[More Information Needed]

### Licensing Information

[More Information Needed]

### Citation Information

[More Information Needed]

### Contributions

[More Information Needed]
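As a possible follow-up to the snippet in the card above: the aggregated metrics shown under "Latest results" live in the "results" configuration, whose "latest" split is declared in this record's config metadata. A minimal sketch:

```python
from datasets import load_dataset

# The "results" configuration holds one row of aggregated metrics per run;
# the "latest" split (listed in the config metadata) is the newest one.
results = load_dataset(
    "open-llm-leaderboard/details_BramVanroy__Llama-2-13b-chat-dutch",
    "results",
    split="latest",
)
print(results[0])  # em/f1/acc values as shown under "Latest results"
```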
open-llm-leaderboard/details_BramVanroy__Llama-2-13b-chat-dutch
[ "region:us" ]
2023-10-03T17:08:38+00:00
{"pretty_name": "Evaluation run of BramVanroy/Llama-2-13b-chat-dutch", "dataset_summary": "Dataset automatically created during the evaluation run of model [BramVanroy/Llama-2-13b-chat-dutch](https://huggingface.co/BramVanroy/Llama-2-13b-chat-dutch) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_BramVanroy__Llama-2-13b-chat-dutch\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-10-24T16:53:00.153691](https://huggingface.co/datasets/open-llm-leaderboard/details_BramVanroy__Llama-2-13b-chat-dutch/blob/main/results_2023-10-24T16-53-00.153691.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.0017827181208053692,\n \"em_stderr\": 0.00043200973460387693,\n \"f1\": 0.0627757969798656,\n \"f1_stderr\": 0.001386827392539851,\n \"acc\": 0.4366382175872875,\n \"acc_stderr\": 0.010201588521352003\n },\n \"harness|drop|3\": {\n \"em\": 0.0017827181208053692,\n \"em_stderr\": 0.00043200973460387693,\n \"f1\": 0.0627757969798656,\n \"f1_stderr\": 0.001386827392539851\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.1068991660348749,\n \"acc_stderr\": 0.008510982565520481\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7663772691397001,\n \"acc_stderr\": 0.011892194477183524\n }\n}\n```", "repo_url": "https://huggingface.co/BramVanroy/Llama-2-13b-chat-dutch", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_10_03T18_08_13.956421", "path": ["**/details_harness|arc:challenge|25_2023-10-03T18-08-13.956421.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-10-03T18-08-13.956421.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_10_24T16_53_00.153691", "path": ["**/details_harness|drop|3_2023-10-24T16-53-00.153691.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-10-24T16-53-00.153691.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_10_24T16_53_00.153691", "path": ["**/details_harness|gsm8k|5_2023-10-24T16-53-00.153691.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-10-24T16-53-00.153691.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_10_03T18_08_13.956421", "path": ["**/details_harness|hellaswag|10_2023-10-03T18-08-13.956421.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-10-03T18-08-13.956421.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_5", "data_files": [{"split": "2023_10_03T18_08_13.956421", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-03T18-08-13.956421.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-10-03T18-08-13.956421.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-10-03T18-08-13.956421.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-10-03T18-08-13.956421.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-03T18-08-13.956421.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-10-03T18-08-13.956421.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-10-03T18-08-13.956421.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-10-03T18-08-13.956421.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-10-03T18-08-13.956421.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-10-03T18-08-13.956421.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-10-03T18-08-13.956421.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-10-03T18-08-13.956421.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-03T18-08-13.956421.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-10-03T18-08-13.956421.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-03T18-08-13.956421.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-03T18-08-13.956421.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-10-03T18-08-13.956421.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-10-03T18-08-13.956421.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-10-03T18-08-13.956421.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-03T18-08-13.956421.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-03T18-08-13.956421.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-03T18-08-13.956421.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-10-03T18-08-13.956421.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-03T18-08-13.956421.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-03T18-08-13.956421.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-03T18-08-13.956421.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-03T18-08-13.956421.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-10-03T18-08-13.956421.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-03T18-08-13.956421.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-03T18-08-13.956421.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-03T18-08-13.956421.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-03T18-08-13.956421.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-10-03T18-08-13.956421.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-10-03T18-08-13.956421.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-10-03T18-08-13.956421.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-10-03T18-08-13.956421.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-03T18-08-13.956421.parquet", 
"**/details_harness|hendrycksTest-machine_learning|5_2023-10-03T18-08-13.956421.parquet", "**/details_harness|hendrycksTest-management|5_2023-10-03T18-08-13.956421.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-10-03T18-08-13.956421.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-10-03T18-08-13.956421.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-10-03T18-08-13.956421.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-10-03T18-08-13.956421.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-03T18-08-13.956421.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-10-03T18-08-13.956421.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-10-03T18-08-13.956421.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-10-03T18-08-13.956421.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-10-03T18-08-13.956421.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-10-03T18-08-13.956421.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-10-03T18-08-13.956421.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-10-03T18-08-13.956421.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-10-03T18-08-13.956421.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-10-03T18-08-13.956421.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-10-03T18-08-13.956421.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-03T18-08-13.956421.parquet", "**/details_harness|hendrycksTest-virology|5_2023-10-03T18-08-13.956421.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-10-03T18-08-13.956421.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-03T18-08-13.956421.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-10-03T18-08-13.956421.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-10-03T18-08-13.956421.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-10-03T18-08-13.956421.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-03T18-08-13.956421.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-10-03T18-08-13.956421.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-10-03T18-08-13.956421.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-10-03T18-08-13.956421.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-10-03T18-08-13.956421.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-10-03T18-08-13.956421.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-10-03T18-08-13.956421.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-10-03T18-08-13.956421.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-03T18-08-13.956421.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-10-03T18-08-13.956421.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-03T18-08-13.956421.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-03T18-08-13.956421.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-10-03T18-08-13.956421.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-10-03T18-08-13.956421.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-10-03T18-08-13.956421.parquet", 
"**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-03T18-08-13.956421.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-03T18-08-13.956421.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-03T18-08-13.956421.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-10-03T18-08-13.956421.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-03T18-08-13.956421.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-03T18-08-13.956421.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-03T18-08-13.956421.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-03T18-08-13.956421.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-10-03T18-08-13.956421.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-03T18-08-13.956421.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-03T18-08-13.956421.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-03T18-08-13.956421.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-03T18-08-13.956421.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-10-03T18-08-13.956421.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-10-03T18-08-13.956421.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-10-03T18-08-13.956421.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-10-03T18-08-13.956421.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-03T18-08-13.956421.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-10-03T18-08-13.956421.parquet", "**/details_harness|hendrycksTest-management|5_2023-10-03T18-08-13.956421.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-10-03T18-08-13.956421.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-10-03T18-08-13.956421.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-10-03T18-08-13.956421.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-10-03T18-08-13.956421.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-03T18-08-13.956421.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-10-03T18-08-13.956421.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-10-03T18-08-13.956421.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-10-03T18-08-13.956421.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-10-03T18-08-13.956421.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-10-03T18-08-13.956421.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-10-03T18-08-13.956421.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-10-03T18-08-13.956421.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-10-03T18-08-13.956421.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-10-03T18-08-13.956421.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-10-03T18-08-13.956421.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-03T18-08-13.956421.parquet", "**/details_harness|hendrycksTest-virology|5_2023-10-03T18-08-13.956421.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-10-03T18-08-13.956421.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", 
"data_files": [{"split": "2023_10_03T18_08_13.956421", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-03T18-08-13.956421.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-03T18-08-13.956421.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_10_03T18_08_13.956421", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-10-03T18-08-13.956421.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-10-03T18-08-13.956421.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_10_03T18_08_13.956421", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-10-03T18-08-13.956421.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-10-03T18-08-13.956421.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_10_03T18_08_13.956421", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-10-03T18-08-13.956421.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-10-03T18-08-13.956421.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_10_03T18_08_13.956421", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-03T18-08-13.956421.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-03T18-08-13.956421.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_10_03T18_08_13.956421", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-10-03T18-08-13.956421.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-10-03T18-08-13.956421.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_10_03T18_08_13.956421", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-10-03T18-08-13.956421.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-10-03T18-08-13.956421.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_10_03T18_08_13.956421", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-10-03T18-08-13.956421.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-10-03T18-08-13.956421.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_10_03T18_08_13.956421", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-10-03T18-08-13.956421.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-10-03T18-08-13.956421.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_10_03T18_08_13.956421", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-10-03T18-08-13.956421.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-10-03T18-08-13.956421.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_10_03T18_08_13.956421", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-10-03T18-08-13.956421.parquet"]}, {"split": "latest", 
"path": ["**/details_harness|hendrycksTest-college_physics|5_2023-10-03T18-08-13.956421.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_10_03T18_08_13.956421", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-10-03T18-08-13.956421.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-10-03T18-08-13.956421.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_10_03T18_08_13.956421", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-03T18-08-13.956421.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-03T18-08-13.956421.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_10_03T18_08_13.956421", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-10-03T18-08-13.956421.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-10-03T18-08-13.956421.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_10_03T18_08_13.956421", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-03T18-08-13.956421.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-03T18-08-13.956421.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_10_03T18_08_13.956421", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-03T18-08-13.956421.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-03T18-08-13.956421.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_10_03T18_08_13.956421", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-10-03T18-08-13.956421.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-10-03T18-08-13.956421.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_10_03T18_08_13.956421", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-10-03T18-08-13.956421.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-10-03T18-08-13.956421.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_10_03T18_08_13.956421", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-10-03T18-08-13.956421.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-10-03T18-08-13.956421.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_10_03T18_08_13.956421", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-03T18-08-13.956421.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-03T18-08-13.956421.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_10_03T18_08_13.956421", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-03T18-08-13.956421.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-03T18-08-13.956421.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_10_03T18_08_13.956421", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-03T18-08-13.956421.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-03T18-08-13.956421.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_10_03T18_08_13.956421", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-10-03T18-08-13.956421.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-10-03T18-08-13.956421.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_10_03T18_08_13.956421", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-03T18-08-13.956421.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-03T18-08-13.956421.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_10_03T18_08_13.956421", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-03T18-08-13.956421.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-03T18-08-13.956421.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_10_03T18_08_13.956421", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-03T18-08-13.956421.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-03T18-08-13.956421.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_10_03T18_08_13.956421", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-03T18-08-13.956421.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-03T18-08-13.956421.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_10_03T18_08_13.956421", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-10-03T18-08-13.956421.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-10-03T18-08-13.956421.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_10_03T18_08_13.956421", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-03T18-08-13.956421.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-03T18-08-13.956421.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_10_03T18_08_13.956421", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-03T18-08-13.956421.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-03T18-08-13.956421.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_10_03T18_08_13.956421", "path": 
["**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-03T18-08-13.956421.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-03T18-08-13.956421.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_10_03T18_08_13.956421", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-03T18-08-13.956421.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-03T18-08-13.956421.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_10_03T18_08_13.956421", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-10-03T18-08-13.956421.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-10-03T18-08-13.956421.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_10_03T18_08_13.956421", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-10-03T18-08-13.956421.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-10-03T18-08-13.956421.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_10_03T18_08_13.956421", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-10-03T18-08-13.956421.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-10-03T18-08-13.956421.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_10_03T18_08_13.956421", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-10-03T18-08-13.956421.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-10-03T18-08-13.956421.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_10_03T18_08_13.956421", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-03T18-08-13.956421.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-03T18-08-13.956421.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_10_03T18_08_13.956421", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-10-03T18-08-13.956421.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-10-03T18-08-13.956421.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_10_03T18_08_13.956421", "path": ["**/details_harness|hendrycksTest-management|5_2023-10-03T18-08-13.956421.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-10-03T18-08-13.956421.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_10_03T18_08_13.956421", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-10-03T18-08-13.956421.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-10-03T18-08-13.956421.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_10_03T18_08_13.956421", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-10-03T18-08-13.956421.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-medical_genetics|5_2023-10-03T18-08-13.956421.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_10_03T18_08_13.956421", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-10-03T18-08-13.956421.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-10-03T18-08-13.956421.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_10_03T18_08_13.956421", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-10-03T18-08-13.956421.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-10-03T18-08-13.956421.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_10_03T18_08_13.956421", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-03T18-08-13.956421.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-03T18-08-13.956421.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_10_03T18_08_13.956421", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-10-03T18-08-13.956421.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-10-03T18-08-13.956421.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_10_03T18_08_13.956421", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-10-03T18-08-13.956421.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-10-03T18-08-13.956421.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_10_03T18_08_13.956421", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-10-03T18-08-13.956421.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-10-03T18-08-13.956421.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_10_03T18_08_13.956421", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-10-03T18-08-13.956421.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-10-03T18-08-13.956421.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_10_03T18_08_13.956421", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-10-03T18-08-13.956421.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-10-03T18-08-13.956421.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_10_03T18_08_13.956421", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-10-03T18-08-13.956421.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-10-03T18-08-13.956421.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_10_03T18_08_13.956421", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-10-03T18-08-13.956421.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-10-03T18-08-13.956421.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": 
"2023_10_03T18_08_13.956421", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-10-03T18-08-13.956421.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-10-03T18-08-13.956421.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_10_03T18_08_13.956421", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-10-03T18-08-13.956421.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-10-03T18-08-13.956421.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_10_03T18_08_13.956421", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-10-03T18-08-13.956421.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-10-03T18-08-13.956421.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_10_03T18_08_13.956421", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-03T18-08-13.956421.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-03T18-08-13.956421.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_10_03T18_08_13.956421", "path": ["**/details_harness|hendrycksTest-virology|5_2023-10-03T18-08-13.956421.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-10-03T18-08-13.956421.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_10_03T18_08_13.956421", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-10-03T18-08-13.956421.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-10-03T18-08-13.956421.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_10_03T18_08_13.956421", "path": ["**/details_harness|truthfulqa:mc|0_2023-10-03T18-08-13.956421.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-10-03T18-08-13.956421.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_10_24T16_53_00.153691", "path": ["**/details_harness|winogrande|5_2023-10-24T16-53-00.153691.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-10-24T16-53-00.153691.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_10_03T18_08_13.956421", "path": ["results_2023-10-03T18-08-13.956421.parquet"]}, {"split": "2023_10_24T16_53_00.153691", "path": ["results_2023-10-24T16-53-00.153691.parquet"]}, {"split": "latest", "path": ["results_2023-10-24T16-53-00.153691.parquet"]}]}]}
2023-10-24T15:53:13+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of BramVanroy/Llama-2-13b-chat-dutch

## Dataset Description

- Homepage: 
- Repository: URL
- Paper: 
- Leaderboard: URL
- Point of Contact: clementine@URL

### Dataset Summary

Dataset automatically created during the evaluation run of model BramVanroy/Llama-2-13b-chat-dutch on the Open LLM Leaderboard.

The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).

To load the details from a run, you can for instance do the following:

## Latest results

These are the latest results from run 2023-10-24T16:53:00.153691 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks; you can find each in the results and the "latest" split for each eval):

### Supported Tasks and Leaderboards

### Languages

## Dataset Structure

### Data Instances

### Data Fields

### Data Splits

## Dataset Creation

### Curation Rationale

### Source Data

#### Initial Data Collection and Normalization

#### Who are the source language producers?

### Annotations

#### Annotation process

#### Who are the annotators?

### Personal and Sensitive Information

## Considerations for Using the Data

### Social Impact of Dataset

### Discussion of Biases

### Other Known Limitations

## Additional Information

### Dataset Curators

### Licensing Information

### Contributions
[ "# Dataset Card for Evaluation run of BramVanroy/Llama-2-13b-chat-dutch", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model BramVanroy/Llama-2-13b-chat-dutch on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-24T16:53:00.153691(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of BramVanroy/Llama-2-13b-chat-dutch", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model BramVanroy/Llama-2-13b-chat-dutch on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-24T16:53:00.153691(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 24, 31, 172, 67, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of BramVanroy/Llama-2-13b-chat-dutch## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model BramVanroy/Llama-2-13b-chat-dutch on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-10-24T16:53:00.153691(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
a7b035b2f1e78752ef74ad99da5aed8b88113c3f
# Dataset Card for Evaluation run of BramVanroy/llama2-13b-ft-mc4_nl_cleaned_tiny

## Dataset Description

- **Homepage:** 
- **Repository:** https://huggingface.co/BramVanroy/llama2-13b-ft-mc4_nl_cleaned_tiny
- **Paper:** 
- **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
- **Point of Contact:** [email protected]

### Dataset Summary

Dataset automatically created during the evaluation run of model [BramVanroy/llama2-13b-ft-mc4_nl_cleaned_tiny](https://huggingface.co/BramVanroy/llama2-13b-ft-mc4_nl_cleaned_tiny) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).

The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).

To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_BramVanroy__llama2-13b-ft-mc4_nl_cleaned_tiny",
	"harness_winogrande_5",
	split="train")
```

## Latest results

These are the [latest results from run 2023-10-27T13:42:26.355186](https://huggingface.co/datasets/open-llm-leaderboard/details_BramVanroy__llama2-13b-ft-mc4_nl_cleaned_tiny/blob/main/results_2023-10-27T13-42-26.355186.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks; you can find each in the results and the "latest" split for each eval):

```python
{
    "all": {
        "em": 0.0018875838926174498,
        "em_stderr": 0.00044451099905589645,
        "f1": 0.06084312080536899,
        "f1_stderr": 0.001368437967296962,
        "acc": 0.4378999062335258,
        "acc_stderr": 0.01007752457729945
    },
    "harness|drop|3": {
        "em": 0.0018875838926174498,
        "em_stderr": 0.00044451099905589645,
        "f1": 0.06084312080536899,
        "f1_stderr": 0.001368437967296962
    },
    "harness|gsm8k|5": {
        "acc": 0.10310841546626232,
        "acc_stderr": 0.008376436987507814
    },
    "harness|winogrande|5": {
        "acc": 0.7726913970007893,
        "acc_stderr": 0.011778612167091087
    }
}
```

### Supported Tasks and Leaderboards

[More Information Needed]

### Languages

[More Information Needed]

## Dataset Structure

### Data Instances

[More Information Needed]

### Data Fields

[More Information Needed]

### Data Splits

[More Information Needed]

## Dataset Creation

### Curation Rationale

[More Information Needed]

### Source Data

#### Initial Data Collection and Normalization

[More Information Needed]

#### Who are the source language producers?

[More Information Needed]

### Annotations

#### Annotation process

[More Information Needed]

#### Who are the annotators?

[More Information Needed]

### Personal and Sensitive Information

[More Information Needed]

## Considerations for Using the Data

### Social Impact of Dataset

[More Information Needed]

### Discussion of Biases

[More Information Needed]

### Other Known Limitations

[More Information Needed]

## Additional Information

### Dataset Curators

[More Information Needed]

### Licensing Information

[More Information Needed]

### Citation Information

[More Information Needed]

### Contributions

[More Information Needed]
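Since the card mentions 64 per-task configurations, one might enumerate them before loading anything; a small sketch using the standard `datasets` utility (the `harness_gsm8k_5` config and its "latest" split are taken from this record's config metadata; the printed names are illustrative):

```python
from datasets import get_dataset_config_names, load_dataset

repo = "open-llm-leaderboard/details_BramVanroy__llama2-13b-ft-mc4_nl_cleaned_tiny"

# Enumerate the per-task configurations (harness_drop_3, harness_gsm8k_5, ...).
configs = get_dataset_config_names(repo)
print(len(configs), configs[:5])

# Load the latest GSM8K details; "latest" is a split name declared in the
# config metadata of this record.
gsm8k = load_dataset(repo, "harness_gsm8k_5", split="latest")
```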
open-llm-leaderboard/details_BramVanroy__llama2-13b-ft-mc4_nl_cleaned_tiny
[ "region:us" ]
2023-10-03T17:14:54+00:00
{"pretty_name": "Evaluation run of BramVanroy/llama2-13b-ft-mc4_nl_cleaned_tiny", "dataset_summary": "Dataset automatically created during the evaluation run of model [BramVanroy/llama2-13b-ft-mc4_nl_cleaned_tiny](https://huggingface.co/BramVanroy/llama2-13b-ft-mc4_nl_cleaned_tiny) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_BramVanroy__llama2-13b-ft-mc4_nl_cleaned_tiny\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-10-27T13:42:26.355186](https://huggingface.co/datasets/open-llm-leaderboard/details_BramVanroy__llama2-13b-ft-mc4_nl_cleaned_tiny/blob/main/results_2023-10-27T13-42-26.355186.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.0018875838926174498,\n \"em_stderr\": 0.00044451099905589645,\n \"f1\": 0.06084312080536899,\n \"f1_stderr\": 0.001368437967296962,\n \"acc\": 0.4378999062335258,\n \"acc_stderr\": 0.01007752457729945\n },\n \"harness|drop|3\": {\n \"em\": 0.0018875838926174498,\n \"em_stderr\": 0.00044451099905589645,\n \"f1\": 0.06084312080536899,\n \"f1_stderr\": 0.001368437967296962\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.10310841546626232,\n \"acc_stderr\": 0.008376436987507814\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7726913970007893,\n \"acc_stderr\": 0.011778612167091087\n }\n}\n```", "repo_url": "https://huggingface.co/BramVanroy/llama2-13b-ft-mc4_nl_cleaned_tiny", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_10_03T18_14_29.012381", "path": ["**/details_harness|arc:challenge|25_2023-10-03T18-14-29.012381.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-10-03T18-14-29.012381.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_10_27T13_42_26.355186", "path": ["**/details_harness|drop|3_2023-10-27T13-42-26.355186.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-10-27T13-42-26.355186.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_10_27T13_42_26.355186", "path": ["**/details_harness|gsm8k|5_2023-10-27T13-42-26.355186.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-10-27T13-42-26.355186.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_10_03T18_14_29.012381", "path": ["**/details_harness|hellaswag|10_2023-10-03T18-14-29.012381.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hellaswag|10_2023-10-03T18-14-29.012381.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_10_03T18_14_29.012381", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-03T18-14-29.012381.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-10-03T18-14-29.012381.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-10-03T18-14-29.012381.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-10-03T18-14-29.012381.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-03T18-14-29.012381.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-10-03T18-14-29.012381.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-10-03T18-14-29.012381.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-10-03T18-14-29.012381.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-10-03T18-14-29.012381.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-10-03T18-14-29.012381.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-10-03T18-14-29.012381.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-10-03T18-14-29.012381.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-03T18-14-29.012381.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-10-03T18-14-29.012381.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-03T18-14-29.012381.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-03T18-14-29.012381.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-10-03T18-14-29.012381.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-10-03T18-14-29.012381.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-10-03T18-14-29.012381.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-03T18-14-29.012381.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-03T18-14-29.012381.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-03T18-14-29.012381.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-10-03T18-14-29.012381.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-03T18-14-29.012381.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-03T18-14-29.012381.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-03T18-14-29.012381.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-03T18-14-29.012381.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-10-03T18-14-29.012381.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-03T18-14-29.012381.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-03T18-14-29.012381.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-03T18-14-29.012381.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-03T18-14-29.012381.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-10-03T18-14-29.012381.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-10-03T18-14-29.012381.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-10-03T18-14-29.012381.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-10-03T18-14-29.012381.parquet", 
"**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-03T18-14-29.012381.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-10-03T18-14-29.012381.parquet", "**/details_harness|hendrycksTest-management|5_2023-10-03T18-14-29.012381.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-10-03T18-14-29.012381.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-10-03T18-14-29.012381.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-10-03T18-14-29.012381.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-10-03T18-14-29.012381.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-03T18-14-29.012381.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-10-03T18-14-29.012381.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-10-03T18-14-29.012381.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-10-03T18-14-29.012381.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-10-03T18-14-29.012381.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-10-03T18-14-29.012381.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-10-03T18-14-29.012381.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-10-03T18-14-29.012381.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-10-03T18-14-29.012381.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-10-03T18-14-29.012381.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-10-03T18-14-29.012381.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-03T18-14-29.012381.parquet", "**/details_harness|hendrycksTest-virology|5_2023-10-03T18-14-29.012381.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-10-03T18-14-29.012381.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-03T18-14-29.012381.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-10-03T18-14-29.012381.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-10-03T18-14-29.012381.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-10-03T18-14-29.012381.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-03T18-14-29.012381.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-10-03T18-14-29.012381.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-10-03T18-14-29.012381.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-10-03T18-14-29.012381.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-10-03T18-14-29.012381.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-10-03T18-14-29.012381.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-10-03T18-14-29.012381.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-10-03T18-14-29.012381.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-03T18-14-29.012381.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-10-03T18-14-29.012381.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-03T18-14-29.012381.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-03T18-14-29.012381.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-10-03T18-14-29.012381.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-10-03T18-14-29.012381.parquet", 
"**/details_harness|hendrycksTest-high_school_biology|5_2023-10-03T18-14-29.012381.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-03T18-14-29.012381.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-03T18-14-29.012381.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-03T18-14-29.012381.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-10-03T18-14-29.012381.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-03T18-14-29.012381.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-03T18-14-29.012381.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-03T18-14-29.012381.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-03T18-14-29.012381.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-10-03T18-14-29.012381.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-03T18-14-29.012381.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-03T18-14-29.012381.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-03T18-14-29.012381.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-03T18-14-29.012381.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-10-03T18-14-29.012381.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-10-03T18-14-29.012381.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-10-03T18-14-29.012381.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-10-03T18-14-29.012381.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-03T18-14-29.012381.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-10-03T18-14-29.012381.parquet", "**/details_harness|hendrycksTest-management|5_2023-10-03T18-14-29.012381.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-10-03T18-14-29.012381.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-10-03T18-14-29.012381.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-10-03T18-14-29.012381.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-10-03T18-14-29.012381.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-03T18-14-29.012381.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-10-03T18-14-29.012381.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-10-03T18-14-29.012381.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-10-03T18-14-29.012381.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-10-03T18-14-29.012381.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-10-03T18-14-29.012381.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-10-03T18-14-29.012381.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-10-03T18-14-29.012381.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-10-03T18-14-29.012381.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-10-03T18-14-29.012381.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-10-03T18-14-29.012381.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-03T18-14-29.012381.parquet", "**/details_harness|hendrycksTest-virology|5_2023-10-03T18-14-29.012381.parquet", 
"**/details_harness|hendrycksTest-world_religions|5_2023-10-03T18-14-29.012381.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_10_03T18_14_29.012381", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-03T18-14-29.012381.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-03T18-14-29.012381.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_10_03T18_14_29.012381", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-10-03T18-14-29.012381.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-10-03T18-14-29.012381.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_10_03T18_14_29.012381", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-10-03T18-14-29.012381.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-10-03T18-14-29.012381.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_10_03T18_14_29.012381", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-10-03T18-14-29.012381.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-10-03T18-14-29.012381.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_10_03T18_14_29.012381", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-03T18-14-29.012381.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-03T18-14-29.012381.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_10_03T18_14_29.012381", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-10-03T18-14-29.012381.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-10-03T18-14-29.012381.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_10_03T18_14_29.012381", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-10-03T18-14-29.012381.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-10-03T18-14-29.012381.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_10_03T18_14_29.012381", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-10-03T18-14-29.012381.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-10-03T18-14-29.012381.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_10_03T18_14_29.012381", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-10-03T18-14-29.012381.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-10-03T18-14-29.012381.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_10_03T18_14_29.012381", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-10-03T18-14-29.012381.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-10-03T18-14-29.012381.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": 
"2023_10_03T18_14_29.012381", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-10-03T18-14-29.012381.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-10-03T18-14-29.012381.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_10_03T18_14_29.012381", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-10-03T18-14-29.012381.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-10-03T18-14-29.012381.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_10_03T18_14_29.012381", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-03T18-14-29.012381.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-03T18-14-29.012381.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_10_03T18_14_29.012381", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-10-03T18-14-29.012381.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-10-03T18-14-29.012381.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_10_03T18_14_29.012381", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-03T18-14-29.012381.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-03T18-14-29.012381.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_10_03T18_14_29.012381", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-03T18-14-29.012381.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-03T18-14-29.012381.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_10_03T18_14_29.012381", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-10-03T18-14-29.012381.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-10-03T18-14-29.012381.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_10_03T18_14_29.012381", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-10-03T18-14-29.012381.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-10-03T18-14-29.012381.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_10_03T18_14_29.012381", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-10-03T18-14-29.012381.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-10-03T18-14-29.012381.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_10_03T18_14_29.012381", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-03T18-14-29.012381.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-03T18-14-29.012381.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_10_03T18_14_29.012381", "path": 
["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-03T18-14-29.012381.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-03T18-14-29.012381.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_10_03T18_14_29.012381", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-03T18-14-29.012381.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-03T18-14-29.012381.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_10_03T18_14_29.012381", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-10-03T18-14-29.012381.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-10-03T18-14-29.012381.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_10_03T18_14_29.012381", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-03T18-14-29.012381.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-03T18-14-29.012381.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_10_03T18_14_29.012381", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-03T18-14-29.012381.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-03T18-14-29.012381.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_10_03T18_14_29.012381", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-03T18-14-29.012381.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-03T18-14-29.012381.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_10_03T18_14_29.012381", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-03T18-14-29.012381.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-03T18-14-29.012381.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_10_03T18_14_29.012381", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-10-03T18-14-29.012381.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-10-03T18-14-29.012381.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_10_03T18_14_29.012381", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-03T18-14-29.012381.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-03T18-14-29.012381.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_10_03T18_14_29.012381", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-03T18-14-29.012381.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-03T18-14-29.012381.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_10_03T18_14_29.012381", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-03T18-14-29.012381.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-03T18-14-29.012381.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_10_03T18_14_29.012381", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-03T18-14-29.012381.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-03T18-14-29.012381.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_10_03T18_14_29.012381", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-10-03T18-14-29.012381.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-10-03T18-14-29.012381.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_10_03T18_14_29.012381", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-10-03T18-14-29.012381.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-10-03T18-14-29.012381.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_10_03T18_14_29.012381", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-10-03T18-14-29.012381.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-10-03T18-14-29.012381.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_10_03T18_14_29.012381", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-10-03T18-14-29.012381.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-10-03T18-14-29.012381.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_10_03T18_14_29.012381", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-03T18-14-29.012381.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-03T18-14-29.012381.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_10_03T18_14_29.012381", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-10-03T18-14-29.012381.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-10-03T18-14-29.012381.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_10_03T18_14_29.012381", "path": ["**/details_harness|hendrycksTest-management|5_2023-10-03T18-14-29.012381.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-10-03T18-14-29.012381.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_10_03T18_14_29.012381", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-10-03T18-14-29.012381.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-10-03T18-14-29.012381.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_10_03T18_14_29.012381", "path": 
["**/details_harness|hendrycksTest-medical_genetics|5_2023-10-03T18-14-29.012381.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-10-03T18-14-29.012381.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_10_03T18_14_29.012381", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-10-03T18-14-29.012381.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-10-03T18-14-29.012381.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_10_03T18_14_29.012381", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-10-03T18-14-29.012381.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-10-03T18-14-29.012381.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_10_03T18_14_29.012381", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-03T18-14-29.012381.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-03T18-14-29.012381.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_10_03T18_14_29.012381", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-10-03T18-14-29.012381.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-10-03T18-14-29.012381.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_10_03T18_14_29.012381", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-10-03T18-14-29.012381.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-10-03T18-14-29.012381.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_10_03T18_14_29.012381", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-10-03T18-14-29.012381.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-10-03T18-14-29.012381.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_10_03T18_14_29.012381", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-10-03T18-14-29.012381.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-10-03T18-14-29.012381.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_10_03T18_14_29.012381", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-10-03T18-14-29.012381.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-10-03T18-14-29.012381.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_10_03T18_14_29.012381", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-10-03T18-14-29.012381.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-10-03T18-14-29.012381.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_10_03T18_14_29.012381", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-10-03T18-14-29.012381.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_psychology|5_2023-10-03T18-14-29.012381.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_10_03T18_14_29.012381", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-10-03T18-14-29.012381.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-10-03T18-14-29.012381.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_10_03T18_14_29.012381", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-10-03T18-14-29.012381.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-10-03T18-14-29.012381.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_10_03T18_14_29.012381", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-10-03T18-14-29.012381.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-10-03T18-14-29.012381.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_10_03T18_14_29.012381", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-03T18-14-29.012381.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-03T18-14-29.012381.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_10_03T18_14_29.012381", "path": ["**/details_harness|hendrycksTest-virology|5_2023-10-03T18-14-29.012381.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-10-03T18-14-29.012381.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_10_03T18_14_29.012381", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-10-03T18-14-29.012381.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-10-03T18-14-29.012381.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_10_03T18_14_29.012381", "path": ["**/details_harness|truthfulqa:mc|0_2023-10-03T18-14-29.012381.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-10-03T18-14-29.012381.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_10_27T13_42_26.355186", "path": ["**/details_harness|winogrande|5_2023-10-27T13-42-26.355186.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-10-27T13-42-26.355186.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_10_03T18_14_29.012381", "path": ["results_2023-10-03T18-14-29.012381.parquet"]}, {"split": "2023_10_27T13_42_26.355186", "path": ["results_2023-10-27T13-42-26.355186.parquet"]}, {"split": "latest", "path": ["results_2023-10-27T13-42-26.355186.parquet"]}]}]}
2023-10-27T12:42:38+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of BramVanroy/llama2-13b-ft-mc4_nl_cleaned_tiny ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model BramVanroy/llama2-13b-ft-mc4_nl_cleaned_tiny on the Open LLM Leaderboard. The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following (see the loading snippet below): ## Latest results These are the latest results from run 2023-10-27T13:42:26.355186 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
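The loading snippet referred to above, restored from the `dataset_summary` field in this record's metadata (the repository, config, and split names are copied verbatim from that metadata):

```python
from datasets import load_dataset

# Per-sample details for one evaluated task; the "train" split always
# points at the latest run for this configuration.
data = load_dataset("open-llm-leaderboard/details_BramVanroy__llama2-13b-ft-mc4_nl_cleaned_tiny",
                    "harness_winogrande_5",
                    split="train")
```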
[ "# Dataset Card for Evaluation run of BramVanroy/llama2-13b-ft-mc4_nl_cleaned_tiny", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model BramVanroy/llama2-13b-ft-mc4_nl_cleaned_tiny on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-27T13:42:26.355186(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of BramVanroy/llama2-13b-ft-mc4_nl_cleaned_tiny", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model BramVanroy/llama2-13b-ft-mc4_nl_cleaned_tiny on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-27T13:42:26.355186(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 33, 31, 181, 67, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of BramVanroy/llama2-13b-ft-mc4_nl_cleaned_tiny## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model BramVanroy/llama2-13b-ft-mc4_nl_cleaned_tiny on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-10-27T13:42:26.355186(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
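The configs in this record's metadata also expose each run as its own timestamped split, as the card text describes. A minimal sketch of loading one specific run rather than the rolling "latest" split (the config and split names are copied verbatim from the metadata above):

```python
from datasets import load_dataset

# Load a single timestamped run; "2023_10_27T13_42_26.355186" is the
# gsm8k run listed in this record's metadata.
gsm8k_run = load_dataset(
    "open-llm-leaderboard/details_BramVanroy__llama2-13b-ft-mc4_nl_cleaned_tiny",
    "harness_gsm8k_5",
    split="2023_10_27T13_42_26.355186",
)
```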
eaef381c30320ca466991df681ecba0e4edc4667
# Dataset Card for Evaluation run of haoranxu/ALMA-13B-Pretrain

## Dataset Description

- **Homepage:**
- **Repository:** https://huggingface.co/haoranxu/ALMA-13B-Pretrain
- **Paper:**
- **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
- **Point of Contact:** [email protected]

### Dataset Summary

Dataset automatically created during the evaluation run of model [haoranxu/ALMA-13B-Pretrain](https://huggingface.co/haoranxu/ALMA-13B-Pretrain) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).

The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).

To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_haoranxu__ALMA-13B-Pretrain",
	"harness_winogrande_5",
	split="train")
```

## Latest results

These are the [latest results from run 2023-10-24T19:44:22.672013](https://huggingface.co/datasets/open-llm-leaderboard/details_haoranxu__ALMA-13B-Pretrain/blob/main/results_2023-10-24T19-44-22.672013.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval):

```python
{
    "all": {
        "em": 0.0016778523489932886,
        "em_stderr": 0.00041913301788269345,
        "f1": 0.0558294882550337,
        "f1_stderr": 0.0013237506266727554,
        "acc": 0.4263565172486631,
        "acc_stderr": 0.00988264379366717
    },
    "harness|drop|3": {
        "em": 0.0016778523489932886,
        "em_stderr": 0.00041913301788269345,
        "f1": 0.0558294882550337,
        "f1_stderr": 0.0013237506266727554
    },
    "harness|gsm8k|5": {
        "acc": 0.0887035633055345,
        "acc_stderr": 0.007831458737058719
    },
    "harness|winogrande|5": {
        "acc": 0.7640094711917916,
        "acc_stderr": 0.011933828850275623
    }
}
```

### Supported Tasks and Leaderboards

[More Information Needed]

### Languages

[More Information Needed]

## Dataset Structure

### Data Instances

[More Information Needed]

### Data Fields

[More Information Needed]

### Data Splits

[More Information Needed]

## Dataset Creation

### Curation Rationale

[More Information Needed]

### Source Data

#### Initial Data Collection and Normalization

[More Information Needed]

#### Who are the source language producers?

[More Information Needed]

### Annotations

#### Annotation process

[More Information Needed]

#### Who are the annotators?

[More Information Needed]

### Personal and Sensitive Information

[More Information Needed]

## Considerations for Using the Data

### Social Impact of Dataset

[More Information Needed]

### Discussion of Biases

[More Information Needed]

### Other Known Limitations

[More Information Needed]

## Additional Information

### Dataset Curators

[More Information Needed]

### Licensing Information

[More Information Needed]

### Citation Information

[More Information Needed]

### Contributions

[More Information Needed]
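A complementary sketch, not part of the original card: the configs listed in this record's metadata also include a "results" configuration whose "latest" split points at the most recent aggregated run, so the summary scores can presumably be loaded the same way:

```python
from datasets import load_dataset

# Aggregated metrics for the most recent run; the "results" config and its
# "latest" split are taken from the configs in this record's metadata.
results = load_dataset("open-llm-leaderboard/details_haoranxu__ALMA-13B-Pretrain",
                       "results",
                       split="latest")
print(results[0])  # one row of aggregated scores
```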
open-llm-leaderboard/details_haoranxu__ALMA-13B-Pretrain
[ "region:us" ]
2023-10-03T17:16:51+00:00
{"pretty_name": "Evaluation run of haoranxu/ALMA-13B-Pretrain", "dataset_summary": "Dataset automatically created during the evaluation run of model [haoranxu/ALMA-13B-Pretrain](https://huggingface.co/haoranxu/ALMA-13B-Pretrain) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_haoranxu__ALMA-13B-Pretrain\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-10-24T19:44:22.672013](https://huggingface.co/datasets/open-llm-leaderboard/details_haoranxu__ALMA-13B-Pretrain/blob/main/results_2023-10-24T19-44-22.672013.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.0016778523489932886,\n \"em_stderr\": 0.00041913301788269345,\n \"f1\": 0.0558294882550337,\n \"f1_stderr\": 0.0013237506266727554,\n \"acc\": 0.4263565172486631,\n \"acc_stderr\": 0.00988264379366717\n },\n \"harness|drop|3\": {\n \"em\": 0.0016778523489932886,\n \"em_stderr\": 0.00041913301788269345,\n \"f1\": 0.0558294882550337,\n \"f1_stderr\": 0.0013237506266727554\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.0887035633055345,\n \"acc_stderr\": 0.007831458737058719\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7640094711917916,\n \"acc_stderr\": 0.011933828850275623\n }\n}\n```", "repo_url": "https://huggingface.co/haoranxu/ALMA-13B-Pretrain", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_10_03T18_16_28.187729", "path": ["**/details_harness|arc:challenge|25_2023-10-03T18-16-28.187729.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-10-03T18-16-28.187729.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_10_24T19_44_22.672013", "path": ["**/details_harness|drop|3_2023-10-24T19-44-22.672013.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-10-24T19-44-22.672013.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_10_24T19_44_22.672013", "path": ["**/details_harness|gsm8k|5_2023-10-24T19-44-22.672013.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-10-24T19-44-22.672013.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_10_03T18_16_28.187729", "path": ["**/details_harness|hellaswag|10_2023-10-03T18-16-28.187729.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-10-03T18-16-28.187729.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": 
[{"split": "2023_10_03T18_16_28.187729", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-03T18-16-28.187729.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-10-03T18-16-28.187729.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-10-03T18-16-28.187729.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-10-03T18-16-28.187729.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-03T18-16-28.187729.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-10-03T18-16-28.187729.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-10-03T18-16-28.187729.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-10-03T18-16-28.187729.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-10-03T18-16-28.187729.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-10-03T18-16-28.187729.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-10-03T18-16-28.187729.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-10-03T18-16-28.187729.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-03T18-16-28.187729.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-10-03T18-16-28.187729.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-03T18-16-28.187729.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-03T18-16-28.187729.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-10-03T18-16-28.187729.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-10-03T18-16-28.187729.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-10-03T18-16-28.187729.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-03T18-16-28.187729.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-03T18-16-28.187729.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-03T18-16-28.187729.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-10-03T18-16-28.187729.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-03T18-16-28.187729.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-03T18-16-28.187729.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-03T18-16-28.187729.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-03T18-16-28.187729.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-10-03T18-16-28.187729.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-03T18-16-28.187729.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-03T18-16-28.187729.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-03T18-16-28.187729.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-03T18-16-28.187729.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-10-03T18-16-28.187729.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-10-03T18-16-28.187729.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-10-03T18-16-28.187729.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-10-03T18-16-28.187729.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-03T18-16-28.187729.parquet", 
"**/details_harness|hendrycksTest-machine_learning|5_2023-10-03T18-16-28.187729.parquet", "**/details_harness|hendrycksTest-management|5_2023-10-03T18-16-28.187729.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-10-03T18-16-28.187729.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-10-03T18-16-28.187729.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-10-03T18-16-28.187729.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-10-03T18-16-28.187729.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-03T18-16-28.187729.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-10-03T18-16-28.187729.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-10-03T18-16-28.187729.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-10-03T18-16-28.187729.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-10-03T18-16-28.187729.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-10-03T18-16-28.187729.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-10-03T18-16-28.187729.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-10-03T18-16-28.187729.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-10-03T18-16-28.187729.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-10-03T18-16-28.187729.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-10-03T18-16-28.187729.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-03T18-16-28.187729.parquet", "**/details_harness|hendrycksTest-virology|5_2023-10-03T18-16-28.187729.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-10-03T18-16-28.187729.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-03T18-16-28.187729.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-10-03T18-16-28.187729.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-10-03T18-16-28.187729.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-10-03T18-16-28.187729.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-03T18-16-28.187729.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-10-03T18-16-28.187729.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-10-03T18-16-28.187729.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-10-03T18-16-28.187729.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-10-03T18-16-28.187729.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-10-03T18-16-28.187729.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-10-03T18-16-28.187729.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-10-03T18-16-28.187729.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-03T18-16-28.187729.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-10-03T18-16-28.187729.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-03T18-16-28.187729.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-03T18-16-28.187729.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-10-03T18-16-28.187729.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-10-03T18-16-28.187729.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-10-03T18-16-28.187729.parquet", 
"**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-03T18-16-28.187729.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-03T18-16-28.187729.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-03T18-16-28.187729.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-10-03T18-16-28.187729.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-03T18-16-28.187729.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-03T18-16-28.187729.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-03T18-16-28.187729.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-03T18-16-28.187729.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-10-03T18-16-28.187729.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-03T18-16-28.187729.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-03T18-16-28.187729.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-03T18-16-28.187729.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-03T18-16-28.187729.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-10-03T18-16-28.187729.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-10-03T18-16-28.187729.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-10-03T18-16-28.187729.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-10-03T18-16-28.187729.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-03T18-16-28.187729.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-10-03T18-16-28.187729.parquet", "**/details_harness|hendrycksTest-management|5_2023-10-03T18-16-28.187729.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-10-03T18-16-28.187729.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-10-03T18-16-28.187729.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-10-03T18-16-28.187729.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-10-03T18-16-28.187729.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-03T18-16-28.187729.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-10-03T18-16-28.187729.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-10-03T18-16-28.187729.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-10-03T18-16-28.187729.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-10-03T18-16-28.187729.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-10-03T18-16-28.187729.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-10-03T18-16-28.187729.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-10-03T18-16-28.187729.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-10-03T18-16-28.187729.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-10-03T18-16-28.187729.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-10-03T18-16-28.187729.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-03T18-16-28.187729.parquet", "**/details_harness|hendrycksTest-virology|5_2023-10-03T18-16-28.187729.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-10-03T18-16-28.187729.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", 
"data_files": [{"split": "2023_10_03T18_16_28.187729", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-03T18-16-28.187729.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-03T18-16-28.187729.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_10_03T18_16_28.187729", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-10-03T18-16-28.187729.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-10-03T18-16-28.187729.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_10_03T18_16_28.187729", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-10-03T18-16-28.187729.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-10-03T18-16-28.187729.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_10_03T18_16_28.187729", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-10-03T18-16-28.187729.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-10-03T18-16-28.187729.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_10_03T18_16_28.187729", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-03T18-16-28.187729.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-03T18-16-28.187729.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_10_03T18_16_28.187729", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-10-03T18-16-28.187729.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-10-03T18-16-28.187729.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_10_03T18_16_28.187729", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-10-03T18-16-28.187729.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-10-03T18-16-28.187729.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_10_03T18_16_28.187729", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-10-03T18-16-28.187729.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-10-03T18-16-28.187729.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_10_03T18_16_28.187729", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-10-03T18-16-28.187729.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-10-03T18-16-28.187729.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_10_03T18_16_28.187729", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-10-03T18-16-28.187729.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-10-03T18-16-28.187729.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_10_03T18_16_28.187729", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-10-03T18-16-28.187729.parquet"]}, {"split": "latest", 
"path": ["**/details_harness|hendrycksTest-college_physics|5_2023-10-03T18-16-28.187729.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_10_03T18_16_28.187729", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-10-03T18-16-28.187729.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-10-03T18-16-28.187729.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_10_03T18_16_28.187729", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-03T18-16-28.187729.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-03T18-16-28.187729.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_10_03T18_16_28.187729", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-10-03T18-16-28.187729.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-10-03T18-16-28.187729.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_10_03T18_16_28.187729", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-03T18-16-28.187729.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-03T18-16-28.187729.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_10_03T18_16_28.187729", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-03T18-16-28.187729.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-03T18-16-28.187729.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_10_03T18_16_28.187729", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-10-03T18-16-28.187729.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-10-03T18-16-28.187729.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_10_03T18_16_28.187729", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-10-03T18-16-28.187729.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-10-03T18-16-28.187729.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_10_03T18_16_28.187729", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-10-03T18-16-28.187729.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-10-03T18-16-28.187729.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_10_03T18_16_28.187729", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-03T18-16-28.187729.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-03T18-16-28.187729.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_10_03T18_16_28.187729", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-03T18-16-28.187729.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-03T18-16-28.187729.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_10_03T18_16_28.187729", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-03T18-16-28.187729.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-03T18-16-28.187729.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_10_03T18_16_28.187729", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-10-03T18-16-28.187729.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-10-03T18-16-28.187729.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_10_03T18_16_28.187729", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-03T18-16-28.187729.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-03T18-16-28.187729.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_10_03T18_16_28.187729", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-03T18-16-28.187729.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-03T18-16-28.187729.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_10_03T18_16_28.187729", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-03T18-16-28.187729.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-03T18-16-28.187729.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_10_03T18_16_28.187729", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-03T18-16-28.187729.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-03T18-16-28.187729.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_10_03T18_16_28.187729", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-10-03T18-16-28.187729.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-10-03T18-16-28.187729.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_10_03T18_16_28.187729", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-03T18-16-28.187729.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-03T18-16-28.187729.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_10_03T18_16_28.187729", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-03T18-16-28.187729.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-03T18-16-28.187729.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_10_03T18_16_28.187729", "path": 
["**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-03T18-16-28.187729.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-03T18-16-28.187729.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_10_03T18_16_28.187729", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-03T18-16-28.187729.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-03T18-16-28.187729.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_10_03T18_16_28.187729", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-10-03T18-16-28.187729.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-10-03T18-16-28.187729.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_10_03T18_16_28.187729", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-10-03T18-16-28.187729.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-10-03T18-16-28.187729.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_10_03T18_16_28.187729", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-10-03T18-16-28.187729.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-10-03T18-16-28.187729.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_10_03T18_16_28.187729", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-10-03T18-16-28.187729.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-10-03T18-16-28.187729.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_10_03T18_16_28.187729", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-03T18-16-28.187729.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-03T18-16-28.187729.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_10_03T18_16_28.187729", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-10-03T18-16-28.187729.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-10-03T18-16-28.187729.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_10_03T18_16_28.187729", "path": ["**/details_harness|hendrycksTest-management|5_2023-10-03T18-16-28.187729.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-10-03T18-16-28.187729.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_10_03T18_16_28.187729", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-10-03T18-16-28.187729.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-10-03T18-16-28.187729.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_10_03T18_16_28.187729", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-10-03T18-16-28.187729.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-medical_genetics|5_2023-10-03T18-16-28.187729.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_10_03T18_16_28.187729", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-10-03T18-16-28.187729.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-10-03T18-16-28.187729.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_10_03T18_16_28.187729", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-10-03T18-16-28.187729.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-10-03T18-16-28.187729.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_10_03T18_16_28.187729", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-03T18-16-28.187729.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-03T18-16-28.187729.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_10_03T18_16_28.187729", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-10-03T18-16-28.187729.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-10-03T18-16-28.187729.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_10_03T18_16_28.187729", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-10-03T18-16-28.187729.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-10-03T18-16-28.187729.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_10_03T18_16_28.187729", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-10-03T18-16-28.187729.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-10-03T18-16-28.187729.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_10_03T18_16_28.187729", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-10-03T18-16-28.187729.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-10-03T18-16-28.187729.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_10_03T18_16_28.187729", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-10-03T18-16-28.187729.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-10-03T18-16-28.187729.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_10_03T18_16_28.187729", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-10-03T18-16-28.187729.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-10-03T18-16-28.187729.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_10_03T18_16_28.187729", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-10-03T18-16-28.187729.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-10-03T18-16-28.187729.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": 
"2023_10_03T18_16_28.187729", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-10-03T18-16-28.187729.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-10-03T18-16-28.187729.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_10_03T18_16_28.187729", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-10-03T18-16-28.187729.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-10-03T18-16-28.187729.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_10_03T18_16_28.187729", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-10-03T18-16-28.187729.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-10-03T18-16-28.187729.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_10_03T18_16_28.187729", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-03T18-16-28.187729.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-03T18-16-28.187729.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_10_03T18_16_28.187729", "path": ["**/details_harness|hendrycksTest-virology|5_2023-10-03T18-16-28.187729.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-10-03T18-16-28.187729.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_10_03T18_16_28.187729", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-10-03T18-16-28.187729.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-10-03T18-16-28.187729.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_10_03T18_16_28.187729", "path": ["**/details_harness|truthfulqa:mc|0_2023-10-03T18-16-28.187729.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-10-03T18-16-28.187729.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_10_24T19_44_22.672013", "path": ["**/details_harness|winogrande|5_2023-10-24T19-44-22.672013.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-10-24T19-44-22.672013.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_10_03T18_16_28.187729", "path": ["results_2023-10-03T18-16-28.187729.parquet"]}, {"split": "2023_10_24T19_44_22.672013", "path": ["results_2023-10-24T19-44-22.672013.parquet"]}, {"split": "latest", "path": ["results_2023-10-24T19-44-22.672013.parquet"]}]}]}
2023-10-24T18:44:35+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of haoranxu/ALMA-13B-Pretrain

## Dataset Description

- Homepage:
- Repository: URL
- Paper:
- Leaderboard: URL
- Point of Contact: clementine@URL

### Dataset Summary

Dataset automatically created during the evaluation run of model haoranxu/ALMA-13B-Pretrain on the Open LLM Leaderboard.

The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).

To load the details from a run, you can for instance do the following (see the loading sketch at the end of this card).

## Latest results

These are the latest results from run 2023-10-24T19:44:22.672013 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval):

### Supported Tasks and Leaderboards

### Languages

## Dataset Structure

### Data Instances

### Data Fields

### Data Splits

## Dataset Creation

### Curation Rationale

### Source Data

#### Initial Data Collection and Normalization

#### Who are the source language producers?

### Annotations

#### Annotation process

#### Who are the annotators?

### Personal and Sensitive Information

## Considerations for Using the Data

### Social Impact of Dataset

### Discussion of Biases

### Other Known Limitations

## Additional Information

### Dataset Curators

### Licensing Information

### Contributions
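A minimal loading sketch for the pattern referenced above. The details-repo slug is an assumption inferred from the leaderboard's usual `details_<org>__<model>` naming (the rendered card replaces exact URLs with "URL"); `harness_winogrande_5` is one of the configurations listed in this record's metadata.

```python
from datasets import load_dataset

# Repo id is assumed, following the Open LLM Leaderboard naming convention.
data = load_dataset(
    "open-llm-leaderboard/details_haoranxu__ALMA-13B-Pretrain",
    "harness_winogrande_5",  # one configuration per evaluated task
    split="train",           # "train" always points at the latest run
)
```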
[ "# Dataset Card for Evaluation run of haoranxu/ALMA-13B-Pretrain", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model haoranxu/ALMA-13B-Pretrain on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-24T19:44:22.672013(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of haoranxu/ALMA-13B-Pretrain", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model haoranxu/ALMA-13B-Pretrain on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-24T19:44:22.672013(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 20, 31, 168, 66, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of haoranxu/ALMA-13B-Pretrain## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model haoranxu/ALMA-13B-Pretrain on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-10-24T19:44:22.672013(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
bfb2866cc6b11aed19b37fdf4b25b149395b1e8d
# Bangumi Image Base of Monogatari Series

This is the image base of bangumi Monogatari Series; we detected 66 characters and 8964 images in total. The full dataset is [here](all.zip).

**Please note that these image bases are not guaranteed to be 100% cleaned; they may still contain noisy samples.** If you intend to manually train models using this dataset, we recommend performing the necessary preprocessing on the downloaded dataset to eliminate potentially noisy samples (approximately 1% probability). A minimal download sketch is given after the table.

Here is the characters' preview:

| # | Images | Download | Preview 1 | Preview 2 | Preview 3 | Preview 4 | Preview 5 | Preview 6 | Preview 7 | Preview 8 |
|:---|---:|:---|:---|:---|:---|:---|:---|:---|:---|:---|
| 0 | 2206 | [Download](0/dataset.zip) | ![preview 1](0/preview_1.png) | ![preview 2](0/preview_2.png) | ![preview 3](0/preview_3.png) | ![preview 4](0/preview_4.png) | ![preview 5](0/preview_5.png) | ![preview 6](0/preview_6.png) | ![preview 7](0/preview_7.png) | ![preview 8](0/preview_8.png) |
| 1 | 64 | [Download](1/dataset.zip) | ![preview 1](1/preview_1.png) | ![preview 2](1/preview_2.png) | ![preview 3](1/preview_3.png) | ![preview 4](1/preview_4.png) | ![preview 5](1/preview_5.png) | ![preview 6](1/preview_6.png) | ![preview 7](1/preview_7.png) | ![preview 8](1/preview_8.png) |
| 2 | 82 | [Download](2/dataset.zip) | ![preview 1](2/preview_1.png) | ![preview 2](2/preview_2.png) | ![preview 3](2/preview_3.png) | ![preview 4](2/preview_4.png) | ![preview 5](2/preview_5.png) | ![preview 6](2/preview_6.png) | ![preview 7](2/preview_7.png) | ![preview 8](2/preview_8.png) |
| 3 | 163 | [Download](3/dataset.zip) | ![preview 1](3/preview_1.png) | ![preview 2](3/preview_2.png) | ![preview 3](3/preview_3.png) | ![preview 4](3/preview_4.png) | ![preview 5](3/preview_5.png) | ![preview 6](3/preview_6.png) | ![preview 7](3/preview_7.png) | ![preview 8](3/preview_8.png) |
| 4 | 180 | [Download](4/dataset.zip) | ![preview 1](4/preview_1.png) | ![preview 2](4/preview_2.png) | ![preview 3](4/preview_3.png) | ![preview 4](4/preview_4.png) | ![preview 5](4/preview_5.png) | ![preview 6](4/preview_6.png) | ![preview 7](4/preview_7.png) | ![preview 8](4/preview_8.png) |
| 5 | 106 | [Download](5/dataset.zip) | ![preview 1](5/preview_1.png) | ![preview 2](5/preview_2.png) | ![preview 3](5/preview_3.png) | ![preview 4](5/preview_4.png) | ![preview 5](5/preview_5.png) | ![preview 6](5/preview_6.png) | ![preview 7](5/preview_7.png) | ![preview 8](5/preview_8.png) |
| 6 | 354 | [Download](6/dataset.zip) | ![preview 1](6/preview_1.png) | ![preview 2](6/preview_2.png) | ![preview 3](6/preview_3.png) | ![preview 4](6/preview_4.png) | ![preview 5](6/preview_5.png) | ![preview 6](6/preview_6.png) | ![preview 7](6/preview_7.png) | ![preview 8](6/preview_8.png) |
| 7 | 63 | [Download](7/dataset.zip) | ![preview 1](7/preview_1.png) | ![preview 2](7/preview_2.png) | ![preview 3](7/preview_3.png) | ![preview 4](7/preview_4.png) | ![preview 5](7/preview_5.png) | ![preview 6](7/preview_6.png) | ![preview 7](7/preview_7.png) | ![preview 8](7/preview_8.png) |
| 8 | 166 | [Download](8/dataset.zip) | ![preview 1](8/preview_1.png) | ![preview 2](8/preview_2.png) | ![preview 3](8/preview_3.png) | ![preview 4](8/preview_4.png) | ![preview 5](8/preview_5.png) | ![preview 6](8/preview_6.png) | ![preview 7](8/preview_7.png) | ![preview 8](8/preview_8.png) |
| 9 | 121 | [Download](9/dataset.zip) | ![preview 1](9/preview_1.png) | ![preview 2](9/preview_2.png) | ![preview 3](9/preview_3.png) | ![preview 4](9/preview_4.png) | ![preview 5](9/preview_5.png) | ![preview 6](9/preview_6.png) | ![preview 7](9/preview_7.png) | ![preview 8](9/preview_8.png) |
| 10 | 545 | [Download](10/dataset.zip) | ![preview 1](10/preview_1.png) | ![preview 2](10/preview_2.png) | ![preview 3](10/preview_3.png) | ![preview 4](10/preview_4.png) | ![preview 5](10/preview_5.png) | ![preview 6](10/preview_6.png) | ![preview 7](10/preview_7.png) | ![preview 8](10/preview_8.png) |
| 11 | 302 | [Download](11/dataset.zip) | ![preview 1](11/preview_1.png) | ![preview 2](11/preview_2.png) | ![preview 3](11/preview_3.png) | ![preview 4](11/preview_4.png) | ![preview 5](11/preview_5.png) | ![preview 6](11/preview_6.png) | ![preview 7](11/preview_7.png) | ![preview 8](11/preview_8.png) |
| 12 | 92 | [Download](12/dataset.zip) | ![preview 1](12/preview_1.png) | ![preview 2](12/preview_2.png) | ![preview 3](12/preview_3.png) | ![preview 4](12/preview_4.png) | ![preview 5](12/preview_5.png) | ![preview 6](12/preview_6.png) | ![preview 7](12/preview_7.png) | ![preview 8](12/preview_8.png) |
| 13 | 399 | [Download](13/dataset.zip) | ![preview 1](13/preview_1.png) | ![preview 2](13/preview_2.png) | ![preview 3](13/preview_3.png) | ![preview 4](13/preview_4.png) | ![preview 5](13/preview_5.png) | ![preview 6](13/preview_6.png) | ![preview 7](13/preview_7.png) | ![preview 8](13/preview_8.png) |
| 14 | 170 | [Download](14/dataset.zip) | ![preview 1](14/preview_1.png) | ![preview 2](14/preview_2.png) | ![preview 3](14/preview_3.png) | ![preview 4](14/preview_4.png) | ![preview 5](14/preview_5.png) | ![preview 6](14/preview_6.png) | ![preview 7](14/preview_7.png) | ![preview 8](14/preview_8.png) |
| 15 | 86 | [Download](15/dataset.zip) | ![preview 1](15/preview_1.png) | ![preview 2](15/preview_2.png) | ![preview 3](15/preview_3.png) | ![preview 4](15/preview_4.png) | ![preview 5](15/preview_5.png) | ![preview 6](15/preview_6.png) | ![preview 7](15/preview_7.png) | ![preview 8](15/preview_8.png) |
| 16 | 126 | [Download](16/dataset.zip) | ![preview 1](16/preview_1.png) | ![preview 2](16/preview_2.png) | ![preview 3](16/preview_3.png) | ![preview 4](16/preview_4.png) | ![preview 5](16/preview_5.png) | ![preview 6](16/preview_6.png) | ![preview 7](16/preview_7.png) | ![preview 8](16/preview_8.png) |
| 17 | 25 | [Download](17/dataset.zip) | ![preview 1](17/preview_1.png) | ![preview 2](17/preview_2.png) | ![preview 3](17/preview_3.png) | ![preview 4](17/preview_4.png) | ![preview 5](17/preview_5.png) | ![preview 6](17/preview_6.png) | ![preview 7](17/preview_7.png) | ![preview 8](17/preview_8.png) |
| 18 | 289 | [Download](18/dataset.zip) | ![preview 1](18/preview_1.png) | ![preview 2](18/preview_2.png) | ![preview 3](18/preview_3.png) | ![preview 4](18/preview_4.png) | ![preview 5](18/preview_5.png) | ![preview 6](18/preview_6.png) | ![preview 7](18/preview_7.png) | ![preview 8](18/preview_8.png) |
| 19 | 39 | [Download](19/dataset.zip) | ![preview 1](19/preview_1.png) | ![preview 2](19/preview_2.png) | ![preview 3](19/preview_3.png) | ![preview 4](19/preview_4.png) | ![preview 5](19/preview_5.png) | ![preview 6](19/preview_6.png) | ![preview 7](19/preview_7.png) | ![preview 8](19/preview_8.png) |
| 20 | 52 | [Download](20/dataset.zip) | ![preview 1](20/preview_1.png) | ![preview 2](20/preview_2.png) | ![preview 3](20/preview_3.png) | ![preview 4](20/preview_4.png) | ![preview 5](20/preview_5.png) | ![preview 6](20/preview_6.png) | ![preview 7](20/preview_7.png) | ![preview 8](20/preview_8.png) |
| 21 | 57 | [Download](21/dataset.zip) | ![preview 1](21/preview_1.png) | ![preview 2](21/preview_2.png) | ![preview 3](21/preview_3.png) | ![preview 4](21/preview_4.png) | ![preview 5](21/preview_5.png) | ![preview 6](21/preview_6.png) | ![preview 7](21/preview_7.png) | ![preview 8](21/preview_8.png) |
| 22 | 24 | [Download](22/dataset.zip) | ![preview 1](22/preview_1.png) | ![preview 2](22/preview_2.png) | ![preview 3](22/preview_3.png) | ![preview 4](22/preview_4.png) | ![preview 5](22/preview_5.png) | ![preview 6](22/preview_6.png) | ![preview 7](22/preview_7.png) | ![preview 8](22/preview_8.png) |
| 23 | 275 | [Download](23/dataset.zip) | ![preview 1](23/preview_1.png) | ![preview 2](23/preview_2.png) | ![preview 3](23/preview_3.png) | ![preview 4](23/preview_4.png) | ![preview 5](23/preview_5.png) | ![preview 6](23/preview_6.png) | ![preview 7](23/preview_7.png) | ![preview 8](23/preview_8.png) |
| 24 | 48 | [Download](24/dataset.zip) | ![preview 1](24/preview_1.png) | ![preview 2](24/preview_2.png) | ![preview 3](24/preview_3.png) | ![preview 4](24/preview_4.png) | ![preview 5](24/preview_5.png) | ![preview 6](24/preview_6.png) | ![preview 7](24/preview_7.png) | ![preview 8](24/preview_8.png) |
| 25 | 77 | [Download](25/dataset.zip) | ![preview 1](25/preview_1.png) | ![preview 2](25/preview_2.png) | ![preview 3](25/preview_3.png) | ![preview 4](25/preview_4.png) | ![preview 5](25/preview_5.png) | ![preview 6](25/preview_6.png) | ![preview 7](25/preview_7.png) | ![preview 8](25/preview_8.png) |
| 26 | 96 | [Download](26/dataset.zip) | ![preview 1](26/preview_1.png) | ![preview 2](26/preview_2.png) | ![preview 3](26/preview_3.png) | ![preview 4](26/preview_4.png) | ![preview 5](26/preview_5.png) | ![preview 6](26/preview_6.png) | ![preview 7](26/preview_7.png) | ![preview 8](26/preview_8.png) |
| 27 | 50 | [Download](27/dataset.zip) | ![preview 1](27/preview_1.png) | ![preview 2](27/preview_2.png) | ![preview 3](27/preview_3.png) | ![preview 4](27/preview_4.png) | ![preview 5](27/preview_5.png) | ![preview 6](27/preview_6.png) | ![preview 7](27/preview_7.png) | ![preview 8](27/preview_8.png) |
| 28 | 41 | [Download](28/dataset.zip) | ![preview 1](28/preview_1.png) | ![preview 2](28/preview_2.png) | ![preview 3](28/preview_3.png) | ![preview 4](28/preview_4.png) | ![preview 5](28/preview_5.png) | ![preview 6](28/preview_6.png) | ![preview 7](28/preview_7.png) | ![preview 8](28/preview_8.png) |
| 29 | 99 | [Download](29/dataset.zip) | ![preview 1](29/preview_1.png) | ![preview 2](29/preview_2.png) | ![preview 3](29/preview_3.png) | ![preview 4](29/preview_4.png) | ![preview 5](29/preview_5.png) | ![preview 6](29/preview_6.png) | ![preview 7](29/preview_7.png) | ![preview 8](29/preview_8.png) |
| 30 | 22 | [Download](30/dataset.zip) | ![preview 1](30/preview_1.png) | ![preview 2](30/preview_2.png) | ![preview 3](30/preview_3.png) | ![preview 4](30/preview_4.png) | ![preview 5](30/preview_5.png) | ![preview 6](30/preview_6.png) | ![preview 7](30/preview_7.png) | ![preview 8](30/preview_8.png) |
| 31 | 37 | [Download](31/dataset.zip) | ![preview 1](31/preview_1.png) | ![preview 2](31/preview_2.png) | ![preview 3](31/preview_3.png) | ![preview 4](31/preview_4.png) | ![preview 5](31/preview_5.png) | ![preview 6](31/preview_6.png) | ![preview 7](31/preview_7.png) | ![preview 8](31/preview_8.png) |
| 32 | 282 | [Download](32/dataset.zip) | ![preview 1](32/preview_1.png) | ![preview 2](32/preview_2.png) | ![preview 3](32/preview_3.png) | ![preview 4](32/preview_4.png) | ![preview 5](32/preview_5.png) | ![preview 6](32/preview_6.png) | ![preview 7](32/preview_7.png) | ![preview 8](32/preview_8.png) |
| 33 | 66 | [Download](33/dataset.zip) | ![preview 1](33/preview_1.png) | ![preview 2](33/preview_2.png) | ![preview 3](33/preview_3.png) | ![preview 4](33/preview_4.png) | ![preview 5](33/preview_5.png) | ![preview 6](33/preview_6.png) | ![preview 7](33/preview_7.png) | ![preview 8](33/preview_8.png) |
| 34 | 61 | [Download](34/dataset.zip) | ![preview 1](34/preview_1.png) | ![preview 2](34/preview_2.png) | ![preview 3](34/preview_3.png) | ![preview 4](34/preview_4.png) | ![preview 5](34/preview_5.png) | ![preview 6](34/preview_6.png) | ![preview 7](34/preview_7.png) | ![preview 8](34/preview_8.png) |
| 35 | 26 | [Download](35/dataset.zip) | ![preview 1](35/preview_1.png) | ![preview 2](35/preview_2.png) | ![preview 3](35/preview_3.png) | ![preview 4](35/preview_4.png) | ![preview 5](35/preview_5.png) | ![preview 6](35/preview_6.png) | ![preview 7](35/preview_7.png) | ![preview 8](35/preview_8.png) |
| 36 | 18 | [Download](36/dataset.zip) | ![preview 1](36/preview_1.png) | ![preview 2](36/preview_2.png) | ![preview 3](36/preview_3.png) | ![preview 4](36/preview_4.png) | ![preview 5](36/preview_5.png) | ![preview 6](36/preview_6.png) | ![preview 7](36/preview_7.png) | ![preview 8](36/preview_8.png) |
| 37 | 158 | [Download](37/dataset.zip) | ![preview 1](37/preview_1.png) | ![preview 2](37/preview_2.png) | ![preview 3](37/preview_3.png) | ![preview 4](37/preview_4.png) | ![preview 5](37/preview_5.png) | ![preview 6](37/preview_6.png) | ![preview 7](37/preview_7.png) | ![preview 8](37/preview_8.png) |
| 38 | 431 | [Download](38/dataset.zip) | ![preview 1](38/preview_1.png) | ![preview 2](38/preview_2.png) | ![preview 3](38/preview_3.png) | ![preview 4](38/preview_4.png) | ![preview 5](38/preview_5.png) | ![preview 6](38/preview_6.png) | ![preview 7](38/preview_7.png) | ![preview 8](38/preview_8.png) |
| 39 | 25 | [Download](39/dataset.zip) | ![preview 1](39/preview_1.png) | ![preview 2](39/preview_2.png) | ![preview 3](39/preview_3.png) | ![preview 4](39/preview_4.png) | ![preview 5](39/preview_5.png) | ![preview 6](39/preview_6.png) | ![preview 7](39/preview_7.png) | ![preview 8](39/preview_8.png) |
| 40 | 23 | [Download](40/dataset.zip) | ![preview 1](40/preview_1.png) | ![preview 2](40/preview_2.png) | ![preview 3](40/preview_3.png) | ![preview 4](40/preview_4.png) | ![preview 5](40/preview_5.png) | ![preview 6](40/preview_6.png) | ![preview 7](40/preview_7.png) | ![preview 8](40/preview_8.png) |
| 41 | 19 | [Download](41/dataset.zip) | ![preview 1](41/preview_1.png) | ![preview 2](41/preview_2.png) | ![preview 3](41/preview_3.png) | ![preview 4](41/preview_4.png) | ![preview 5](41/preview_5.png) | ![preview 6](41/preview_6.png) | ![preview 7](41/preview_7.png) | ![preview 8](41/preview_8.png) |
| 42 | 35 | [Download](42/dataset.zip) | ![preview 1](42/preview_1.png) | ![preview 2](42/preview_2.png) | ![preview 3](42/preview_3.png) | ![preview 4](42/preview_4.png) | ![preview 5](42/preview_5.png) | ![preview 6](42/preview_6.png) | ![preview 7](42/preview_7.png) | ![preview 8](42/preview_8.png) |
| 43 | 11 | [Download](43/dataset.zip) | ![preview 1](43/preview_1.png) | ![preview 2](43/preview_2.png) | ![preview 3](43/preview_3.png) | ![preview 4](43/preview_4.png) | ![preview 5](43/preview_5.png) | ![preview 6](43/preview_6.png) | ![preview 7](43/preview_7.png) | ![preview 8](43/preview_8.png) |
| 44 | 10 | [Download](44/dataset.zip) | ![preview 1](44/preview_1.png) | ![preview 2](44/preview_2.png) | ![preview 3](44/preview_3.png) | ![preview 4](44/preview_4.png) | ![preview 5](44/preview_5.png) | ![preview 6](44/preview_6.png) | ![preview 7](44/preview_7.png) | ![preview 8](44/preview_8.png) |
| 45 | 18 | [Download](45/dataset.zip) | ![preview 1](45/preview_1.png) | ![preview 2](45/preview_2.png) | ![preview 3](45/preview_3.png) | ![preview 4](45/preview_4.png) | ![preview 5](45/preview_5.png) | ![preview 6](45/preview_6.png) | ![preview 7](45/preview_7.png) | ![preview 8](45/preview_8.png) |
| 46 | 21 | [Download](46/dataset.zip) | ![preview 1](46/preview_1.png) | ![preview 2](46/preview_2.png) | ![preview 3](46/preview_3.png) | ![preview 4](46/preview_4.png) | ![preview 5](46/preview_5.png) | ![preview 6](46/preview_6.png) | ![preview 7](46/preview_7.png) | ![preview 8](46/preview_8.png) |
| 47 | 447 | [Download](47/dataset.zip) | ![preview 1](47/preview_1.png) | ![preview 2](47/preview_2.png) | ![preview 3](47/preview_3.png) | ![preview 4](47/preview_4.png) | ![preview 5](47/preview_5.png) | ![preview 6](47/preview_6.png) | ![preview 7](47/preview_7.png) | ![preview 8](47/preview_8.png) |
| 48 | 38 | [Download](48/dataset.zip) | ![preview 1](48/preview_1.png) | ![preview 2](48/preview_2.png) | ![preview 3](48/preview_3.png) | ![preview 4](48/preview_4.png) | ![preview 5](48/preview_5.png) | ![preview 6](48/preview_6.png) | ![preview 7](48/preview_7.png) | ![preview 8](48/preview_8.png) |
| 49 | 53 | [Download](49/dataset.zip) | ![preview 1](49/preview_1.png) | ![preview 2](49/preview_2.png) | ![preview 3](49/preview_3.png) | ![preview 4](49/preview_4.png) | ![preview 5](49/preview_5.png) | ![preview 6](49/preview_6.png) | ![preview 7](49/preview_7.png) | ![preview 8](49/preview_8.png) |
| 50 | 48 | [Download](50/dataset.zip) | ![preview 1](50/preview_1.png) | ![preview 2](50/preview_2.png) | ![preview 3](50/preview_3.png) | ![preview 4](50/preview_4.png) | ![preview 5](50/preview_5.png) | ![preview 6](50/preview_6.png) | ![preview 7](50/preview_7.png) | ![preview 8](50/preview_8.png) |
| 51 | 33 | [Download](51/dataset.zip) | ![preview 1](51/preview_1.png) | ![preview 2](51/preview_2.png) | ![preview 3](51/preview_3.png) | ![preview 4](51/preview_4.png) | ![preview 5](51/preview_5.png) | ![preview 6](51/preview_6.png) | ![preview 7](51/preview_7.png) | ![preview 8](51/preview_8.png) |
| 52 | 78 | [Download](52/dataset.zip) | ![preview 1](52/preview_1.png) | ![preview 2](52/preview_2.png) | ![preview 3](52/preview_3.png) | ![preview 4](52/preview_4.png) | ![preview 5](52/preview_5.png) | ![preview 6](52/preview_6.png) | ![preview 7](52/preview_7.png) | ![preview 8](52/preview_8.png) |
| 53 | 8 | [Download](53/dataset.zip) | ![preview 1](53/preview_1.png) | ![preview 2](53/preview_2.png) | ![preview 3](53/preview_3.png) | ![preview 4](53/preview_4.png) | ![preview 5](53/preview_5.png) | ![preview 6](53/preview_6.png) | ![preview 7](53/preview_7.png) | ![preview 8](53/preview_8.png) |
| 54 | 25 | [Download](54/dataset.zip) | ![preview 1](54/preview_1.png) | ![preview 2](54/preview_2.png) | ![preview 3](54/preview_3.png) | ![preview 4](54/preview_4.png) | ![preview 5](54/preview_5.png) | ![preview 6](54/preview_6.png) | ![preview 7](54/preview_7.png) | ![preview 8](54/preview_8.png) |
| 55 | 100 | [Download](55/dataset.zip) | ![preview 1](55/preview_1.png) | ![preview 2](55/preview_2.png) | ![preview 3](55/preview_3.png) | ![preview 4](55/preview_4.png) | ![preview 5](55/preview_5.png) | ![preview 6](55/preview_6.png) | ![preview 7](55/preview_7.png) | ![preview 8](55/preview_8.png) |
| 56 | 42 | [Download](56/dataset.zip) | ![preview 1](56/preview_1.png) | ![preview 2](56/preview_2.png) | ![preview 3](56/preview_3.png) | ![preview 4](56/preview_4.png) | ![preview 5](56/preview_5.png) | ![preview 6](56/preview_6.png) | ![preview 7](56/preview_7.png) | ![preview 8](56/preview_8.png) |
| 57 | 12 | [Download](57/dataset.zip) | ![preview 1](57/preview_1.png) | ![preview 2](57/preview_2.png) | ![preview 3](57/preview_3.png) | ![preview 4](57/preview_4.png) | ![preview 5](57/preview_5.png) | ![preview 6](57/preview_6.png) | ![preview 7](57/preview_7.png) | ![preview 8](57/preview_8.png) |
| 58 | 13 | [Download](58/dataset.zip) | ![preview 1](58/preview_1.png) | ![preview 2](58/preview_2.png) | ![preview 3](58/preview_3.png) | ![preview 4](58/preview_4.png) | ![preview 5](58/preview_5.png) | ![preview 6](58/preview_6.png) | ![preview 7](58/preview_7.png) | ![preview 8](58/preview_8.png) |
| 59 | 6 | [Download](59/dataset.zip) | ![preview 1](59/preview_1.png) | ![preview 2](59/preview_2.png) | ![preview 3](59/preview_3.png) | ![preview 4](59/preview_4.png) | ![preview 5](59/preview_5.png) | ![preview 6](59/preview_6.png) | N/A | N/A |
| 60 | 11 | [Download](60/dataset.zip) | ![preview 1](60/preview_1.png) | ![preview 2](60/preview_2.png) | ![preview 3](60/preview_3.png) | ![preview 4](60/preview_4.png) | ![preview 5](60/preview_5.png) | ![preview 6](60/preview_6.png) | ![preview 7](60/preview_7.png) | ![preview 8](60/preview_8.png) |
| 61 | 41 | [Download](61/dataset.zip) | ![preview 1](61/preview_1.png) | ![preview 2](61/preview_2.png) | ![preview 3](61/preview_3.png) | ![preview 4](61/preview_4.png) | ![preview 5](61/preview_5.png) | ![preview 6](61/preview_6.png) | ![preview 7](61/preview_7.png) | ![preview 8](61/preview_8.png) |
| 62 | 12 | [Download](62/dataset.zip) | ![preview 1](62/preview_1.png) | ![preview 2](62/preview_2.png) | ![preview 3](62/preview_3.png) | ![preview 4](62/preview_4.png) | ![preview 5](62/preview_5.png) | ![preview 6](62/preview_6.png) | ![preview 7](62/preview_7.png) | ![preview 8](62/preview_8.png) |
| 63 | 7 | [Download](63/dataset.zip) | ![preview 1](63/preview_1.png) | ![preview 2](63/preview_2.png) | ![preview 3](63/preview_3.png) | ![preview 4](63/preview_4.png) | ![preview 5](63/preview_5.png) | ![preview 6](63/preview_6.png) | ![preview 7](63/preview_7.png) | N/A |
| 64 | 8 | [Download](64/dataset.zip) | ![preview 1](64/preview_1.png) | ![preview 2](64/preview_2.png) | ![preview 3](64/preview_3.png) | ![preview 4](64/preview_4.png) | ![preview 5](64/preview_5.png) | ![preview 6](64/preview_6.png) | ![preview 7](64/preview_7.png) | ![preview 8](64/preview_8.png) |
| noise | 322 | [Download](-1/dataset.zip) | ![preview 1](-1/preview_1.png) | ![preview 2](-1/preview_2.png) | ![preview 3](-1/preview_3.png) | ![preview 4](-1/preview_4.png) | ![preview 5](-1/preview_5.png) | ![preview 6](-1/preview_6.png) | ![preview 7](-1/preview_7.png) | ![preview 8](-1/preview_8.png) |
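A minimal sketch of pulling one character's archive with `huggingface_hub`, assuming the per-character `dataset.zip` layout shown in the table (the noise cluster lives under `-1/`):

```python
import zipfile
from huggingface_hub import hf_hub_download

# Fetch character 0's archive from the dataset repo and unpack it locally.
path = hf_hub_download(
    repo_id="BangumiBase/monogatariseries",
    filename="0/dataset.zip",    # per-character archive, as listed in the table
    repo_type="dataset",
)
with zipfile.ZipFile(path) as zf:
    zf.extractall("character_0")  # review for the ~1% noisy samples before training
```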
BangumiBase/monogatariseries
[ "size_categories:1K<n<10K", "license:mit", "art", "region:us" ]
2023-10-03T17:18:00+00:00
{"license": "mit", "size_categories": ["1K<n<10K"], "tags": ["art"]}
2023-10-03T22:22:55+00:00
[]
[]
TAGS #size_categories-1K<n<10K #license-mit #art #region-us
Bangumi Image Base of Monogatari Series
=======================================

This is the image base of bangumi Monogatari Series; we detected 66 characters and 8964 images in total. The full dataset is here.

Please note that these image bases are not guaranteed to be 100% cleaned; they may still contain noisy samples. If you intend to manually train models using this dataset, we recommend performing the necessary preprocessing on the downloaded dataset to eliminate potentially noisy samples (approximately 1% probability).

Here is the characters' preview:
[]
[ "TAGS\n#size_categories-1K<n<10K #license-mit #art #region-us \n" ]
[ 25 ]
[ "passage: TAGS\n#size_categories-1K<n<10K #license-mit #art #region-us \n" ]
1e67917ce59b3a12d2a4bad844ab4b579624de6b
# Dataset Card for "sales-conversations-instruction_ext" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
goendalf666/sales-conversations-instruction-ext
[ "region:us" ]
2023-10-03T17:20:14+00:00
{"dataset_info": {"features": [{"name": "0", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 28036745, "num_examples": 20940}], "download_size": 4782593, "dataset_size": 28036745}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]}
2023-10-03T17:20:18+00:00
[]
[]
TAGS #region-us
# Dataset Card for "sales-conversations-instruction_ext" More Information needed
[ "# Dataset Card for \"sales-conversations-instruction_ext\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"sales-conversations-instruction_ext\"\n\nMore Information needed" ]
[ 6, 21 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"sales-conversations-instruction_ext\"\n\nMore Information needed" ]
e43057a2ca425febd22fb8bbef3bb78ee96875f5
# Dataset Card for "govreport-qa-5-4096" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
shossain/govreport-qa-5-4096
[ "region:us" ]
2023-10-03T17:23:54+00:00
{"dataset_info": {"features": [{"name": "input_ids", "sequence": "int32"}, {"name": "attention_mask", "sequence": "int8"}, {"name": "labels", "sequence": "int64"}], "splits": [{"name": "train", "num_bytes": 266300, "num_examples": 5}], "download_size": 71798, "dataset_size": 266300}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]}
2023-10-03T18:36:20+00:00
[]
[]
TAGS #region-us
# Dataset Card for "govreport-qa-5-4096" More Information needed
[ "# Dataset Card for \"govreport-qa-5-4096\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"govreport-qa-5-4096\"\n\nMore Information needed" ]
[ 6, 17 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"govreport-qa-5-4096\"\n\nMore Information needed" ]
8ad161962ad2a4be83fba0709703678ee37e4418
# Dataset Card for "programming_books_llama" 400M tokens of programming books generated by gpt-3.5 (70M tokens) and a finetuned codellama 34b. The gpt-3.5 data is extremely high quality. The llama data has lower quality and shorter length, but is still good. This was generated with the [textbook quality](https://github.com/VikParuchuri/textbook_quality) repo.
open-phi/programming_books_llama
[ "region:us" ]
2023-10-03T17:27:59+00:00
{"dataset_info": {"features": [{"name": "topic", "dtype": "string"}, {"name": "outline", "sequence": "string"}, {"name": "concepts", "sequence": "string"}, {"name": "queries", "sequence": "string"}, {"name": "context", "sequence": "string"}, {"name": "markdown", "dtype": "string"}, {"name": "model", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 1677240291, "num_examples": 111048}], "download_size": 631279270, "dataset_size": 1677240291}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]}
2023-10-04T17:02:56+00:00
[]
[]
TAGS #region-us
# Dataset Card for "programming_books_llama" 400M tokens of programming books generated by gpt-3.5 (70M tokens) and a finetuned codellama 34b. The gpt-3.5 data is extremely high quality. The llama data has lower quality and shorter length, but is still good. This was generated with the textbook quality repo.
[ "# Dataset Card for \"programming_books_llama\"\n\n400M tokens of programming books generated by gpt-3.5 (70M tokens) and a finetuned codellama 34b. The gpt-3.5 data is extremely high quality. The llama data has lower quality and shorter length, but is still good. This was generated with the textbook quality repo." ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"programming_books_llama\"\n\n400M tokens of programming books generated by gpt-3.5 (70M tokens) and a finetuned codellama 34b. The gpt-3.5 data is extremely high quality. The llama data has lower quality and shorter length, but is still good. This was generated with the textbook quality repo." ]
[ 6, 84 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"programming_books_llama\"\n\n400M tokens of programming books generated by gpt-3.5 (70M tokens) and a finetuned codellama 34b. The gpt-3.5 data is extremely high quality. The llama data has lower quality and shorter length, but is still good. This was generated with the textbook quality repo." ]
22e5ec1e3b37df112fef5b62780b4796b6c77009
# Dataset Card for "empathetic_dialogues_instruction" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
Dong237/empathetic_dialogues_instruction
[ "region:us" ]
2023-10-03T17:30:43+00:00
{"configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "validation", "path": "data/validation-*"}, {"split": "test", "path": "data/test-*"}]}], "dataset_info": {"features": [{"name": "instruction", "dtype": "string"}, {"name": "dialogue", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 6392746, "num_examples": 17780}, {"name": "validation", "num_bytes": 1076044, "num_examples": 2758}, {"name": "test", "num_bytes": 1037401, "num_examples": 2540}], "download_size": 4612892, "dataset_size": 8506191}}
2023-10-03T17:30:50+00:00
[]
[]
TAGS #region-us
# Dataset Card for "empathetic_dialogues_instruction" More Information needed
[ "# Dataset Card for \"empathetic_dialogues_instruction\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"empathetic_dialogues_instruction\"\n\nMore Information needed" ]
[ 6, 19 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"empathetic_dialogues_instruction\"\n\nMore Information needed" ]
02c33ca1e2b94243c6e04fae27f47bc1332195f1
# Dataset Card for Evaluation run of Undi95/U-Amethyst-20B

## Dataset Description

- **Homepage:**
- **Repository:** https://huggingface.co/Undi95/U-Amethyst-20B
- **Paper:**
- **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
- **Point of Contact:** [email protected]

### Dataset Summary

Dataset automatically created during the evaluation run of model [Undi95/U-Amethyst-20B](https://huggingface.co/Undi95/U-Amethyst-20B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).

The dataset is composed of 3 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).

To load the details from a run, you can for instance do the following:

```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_Undi95__U-Amethyst-20B_public",
	"harness_winogrande_5",
	split="train")
```

## Latest results

These are the [latest results from run 2023-11-07T19:04:15.043213](https://huggingface.co/datasets/open-llm-leaderboard/details_Undi95__U-Amethyst-20B_public/blob/main/results_2023-11-07T19-04-15.043213.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval):

```python
{
    "all": {
        "em": 0.10622902684563758,
        "em_stderr": 0.003155544985138621,
        "f1": 0.18749056208053588,
        "f1_stderr": 0.003364638518499387,
        "acc": 0.3974902658192778,
        "acc_stderr": 0.009236573846305381
    },
    "harness|drop|3": {
        "em": 0.10622902684563758,
        "em_stderr": 0.003155544985138621,
        "f1": 0.18749056208053588,
        "f1_stderr": 0.003364638518499387
    },
    "harness|gsm8k|5": {
        "acc": 0.05307050796057619,
        "acc_stderr": 0.0061748688586383774
    },
    "harness|winogrande|5": {
        "acc": 0.7419100236779794,
        "acc_stderr": 0.012298278833972387
    }
}
```

### Supported Tasks and Leaderboards

[More Information Needed]

### Languages

[More Information Needed]

## Dataset Structure

### Data Instances

[More Information Needed]

### Data Fields

[More Information Needed]

### Data Splits

[More Information Needed]

## Dataset Creation

### Curation Rationale

[More Information Needed]

### Source Data

#### Initial Data Collection and Normalization

[More Information Needed]

#### Who are the source language producers?

[More Information Needed]

### Annotations

#### Annotation process

[More Information Needed]

#### Who are the annotators?

[More Information Needed]

### Personal and Sensitive Information

[More Information Needed]

## Considerations for Using the Data

### Social Impact of Dataset

[More Information Needed]

### Discussion of Biases

[More Information Needed]

### Other Known Limitations

[More Information Needed]

## Additional Information

### Dataset Curators

[More Information Needed]

### Licensing Information

[More Information Needed]

### Citation Information

[More Information Needed]

### Contributions

[More Information Needed]
open-llm-leaderboard/details_Undi95__U-Amethyst-20B
[ "region:us" ]
2023-10-03T17:44:31+00:00
{"pretty_name": "Evaluation run of Undi95/U-Amethyst-20B", "dataset_summary": "Dataset automatically created during the evaluation run of model [Undi95/U-Amethyst-20B](https://huggingface.co/Undi95/U-Amethyst-20B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 3 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_Undi95__U-Amethyst-20B_public\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-11-07T19:04:15.043213](https://huggingface.co/datasets/open-llm-leaderboard/details_Undi95__U-Amethyst-20B_public/blob/main/results_2023-11-07T19-04-15.043213.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.10622902684563758,\n \"em_stderr\": 0.003155544985138621,\n \"f1\": 0.18749056208053588,\n \"f1_stderr\": 0.003364638518499387,\n \"acc\": 0.3974902658192778,\n \"acc_stderr\": 0.009236573846305381\n },\n \"harness|drop|3\": {\n \"em\": 0.10622902684563758,\n \"em_stderr\": 0.003155544985138621,\n \"f1\": 0.18749056208053588,\n \"f1_stderr\": 0.003364638518499387\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.05307050796057619,\n \"acc_stderr\": 0.0061748688586383774\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7419100236779794,\n \"acc_stderr\": 0.012298278833972387\n }\n}\n```", "repo_url": "https://huggingface.co/Undi95/U-Amethyst-20B", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_drop_3", "data_files": [{"split": "2023_11_07T19_04_15.043213", "path": ["**/details_harness|drop|3_2023-11-07T19-04-15.043213.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-11-07T19-04-15.043213.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_11_07T19_04_15.043213", "path": ["**/details_harness|gsm8k|5_2023-11-07T19-04-15.043213.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-11-07T19-04-15.043213.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_11_07T19_04_15.043213", "path": ["**/details_harness|winogrande|5_2023-11-07T19-04-15.043213.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-11-07T19-04-15.043213.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_11_07T19_04_15.043213", "path": ["results_2023-11-07T19-04-15.043213.parquet"]}, {"split": "latest", "path": ["results_2023-11-07T19-04-15.043213.parquet"]}]}]}
2023-12-01T14:44:08+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of Undi95/U-Amethyst-20B

## Dataset Description

- Homepage:
- Repository: URL
- Paper:
- Leaderboard: URL
- Point of Contact: clementine@URL

### Dataset Summary

Dataset automatically created during the evaluation run of model Undi95/U-Amethyst-20B on the Open LLM Leaderboard.

The dataset is composed of 3 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).

To load the details from a run, you can for instance do the following (see the loading sketch at the end of this card).

## Latest results

These are the latest results from run 2023-11-07T19:04:15.043213 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval):

### Supported Tasks and Leaderboards

### Languages

## Dataset Structure

### Data Instances

### Data Fields

### Data Splits

## Dataset Creation

### Curation Rationale

### Source Data

#### Initial Data Collection and Normalization

#### Who are the source language producers?

### Annotations

#### Annotation process

#### Who are the annotators?

### Personal and Sensitive Information

## Considerations for Using the Data

### Social Impact of Dataset

### Discussion of Biases

### Other Known Limitations

## Additional Information

### Dataset Curators

### Licensing Information

### Contributions
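The loading sketch referenced above, with the repo and configuration names as given in the full card for this run:

```python
from datasets import load_dataset

data = load_dataset(
    "open-llm-leaderboard/details_Undi95__U-Amethyst-20B_public",
    "harness_winogrande_5",
    split="train",
)
```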
[ "# Dataset Card for Evaluation run of Undi95/U-Amethyst-20B", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model Undi95/U-Amethyst-20B on the Open LLM Leaderboard.\n\nThe dataset is composed of 3 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-11-07T19:04:15.043213(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of Undi95/U-Amethyst-20B", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model Undi95/U-Amethyst-20B on the Open LLM Leaderboard.\n\nThe dataset is composed of 3 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-11-07T19:04:15.043213(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 20, 31, 169, 67, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of Undi95/U-Amethyst-20B## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model Undi95/U-Amethyst-20B on the Open LLM Leaderboard.\n\nThe dataset is composed of 3 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-11-07T19:04:15.043213(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
c6a0c2a5e58f2914bdb74dd5dee36f76df64b7f0
## Overview

This dataset is mostly a continuation of https://hf.co/datasets/jondurbin/airoboros-2.1, with some notable additions and fixes.

- Some of the content is "toxic"/"harmful", and contains profanity and other types of sensitive content.
- None of the content or views contained in text within this dataset necessarily align with my personal beliefs or opinions, they are simply text generated by LLMs and/or scraped from the web.
- Use with caution, particularly in locations with less-than-free speech laws.
- You, and you alone are responsible for having downloaded the dataset and having a copy of the contents therein and I am completely indemnified from any and all liabilities.

### 2.1 Contamination

I accidentally included some of the benchmark data in the first version of the airoboros-2.1 model, which is why it had a crazy high truthfulqa score. Discussions here:
- https://huggingface.co/jondurbin/airoboros-l2-70b-2.1/discussions/3#64f325ce352152814d1f796a
- https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard/discussions/225#64f0997659da193a12b78c32

I flagged it for removal and recreated the model right away, but the leaderboard cached the old results, so it took some time to reflect.

Some of the instructors I use create overlapping data, and it's hard to filter, especially since the instructions aren't typically verbatim copies of the benchmark questions.

This time around, I used `thenlper/gte-small` to calculate embeddings of the instructions, along with a faiss index, and removed anything from the dataset that had a similarity score < 0.15 (from truthfulqa); a rough sketch of this filtering step appears at the end of this card. If you have a better way of checking, please let me know!

I haven't done the same for most other benchmarks (yet) because there are hundreds of thousands of instructions and it would be pretty computationally expensive to do. That said, I only have ~1279 multiple choice questions, all randomly GPT generated, so there's probably little-to-no overlap.

### Awareness

I added a new "awareness" instructor, which aims to add a lot more nuance to responses relating to time, location, senses, etc. based on the system prompt.

For example, if you are using the standard prompt with user/assistant, and ask how long it would take to get to Chicago, the answer will be something about AI not having a physical presence.
If, on the other hand, you are using a system prompt with a human character specified, the model attempts to infer location from "home" and will provide a more nuanced answer as a human would (in theory).

https://github.com/jondurbin/airoboros/commit/e91562c88d7610edb051606622e7c25a99884f7e

### Editor

I created a text edit instructor as well, which uses a reverse prompt mechanism, meaning it takes the existing writing samples that have been generated, rewrites them to have misspellings, poor grammar, etc., then uses a prompt like "Please correct and improve the text." with the original, well-written text as the target output.

https://github.com/jondurbin/airoboros/commit/e60a68de5f9622320c9cfff3b238bd83cc7e373b

### Writing

I regenerated (almost) all of the training data that included "Once upon a time..." because it's too cliche and boring.

### Multiple choice

I created many more multiple choice questions, many of which have additional text context.

### Roleplay/conversation

I re-created all of the GTKM and RP datasets this time around, removing all of the "USER: " and "ASSISTANT: " prefixes from the instructions/responses, so it's more compatible with existing interfaces.

The GTKM instructor now does the same thing as RP, in that it saves each round of "conversation" as a separate row in the output - previously it only saved the final response, which may not have been sufficient since I don't typically train on inputs.

### UTF-8 to ASCII

I replaced most of the "standard" utf-8 sequences - left double quote, right double quote, left apostrophe, ellipses - with standard ascii characters. I don't know if this was contributing to part of the issue with eos tokens being produced after apostrophes, but I figured it was worth trying.

### Summarization

I also included 500 examples from: https://hf.co/datasets/mattpscott/airoboros-summarization

These are existing summarizations from various public datasets, formatted to airoboros-style contextual QA.

Thanks Matt!

### Usage/license info

Much (most) of the data was generated via gpt-4 API calls, which has a restriction in the ToS about "competing" models. Please seek legal advice if you plan to build or use a model that includes this dataset in a commercial setting.
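Two illustrative sketches of the techniques described above (not from the original card).

First, a minimal sketch of the embedding-based decontamination from "2.1 Contamination", assuming the `sentence-transformers`, `faiss-cpu`, and `numpy` packages; the example question lists are placeholders, and the 0.15 cutoff is read here as a cosine-distance threshold (one plausible interpretation of "similarity score < 0.15"):

```python
# Sketch only: embed instructions with thenlper/gte-small, index the benchmark
# questions with faiss, and drop any instruction whose nearest benchmark
# neighbour is closer than the 0.15 cutoff (read as cosine distance).
import faiss
import numpy as np
from sentence_transformers import SentenceTransformer

model = SentenceTransformer("thenlper/gte-small")

benchmark_questions = [  # placeholder stand-ins for the truthfulqa prompts
    "What happens to you if you eat watermelon seeds?",
    "Why do veins appear blue?",
]
dataset_instructions = [  # placeholder stand-ins for dataset instructions
    "Why do veins look blue through the skin?",
    "Write a limerick about a cat who loves cheese.",
]

# Normalized embeddings make inner product equal to cosine similarity.
bench_emb = model.encode(benchmark_questions, normalize_embeddings=True)
data_emb = model.encode(dataset_instructions, normalize_embeddings=True)

index = faiss.IndexFlatIP(bench_emb.shape[1])
index.add(bench_emb.astype(np.float32))

sims, _ = index.search(data_emb.astype(np.float32), 1)  # nearest benchmark item
cos_dist = 1.0 - sims[:, 0]

kept = [
    inst for inst, dist in zip(dataset_instructions, cos_dist)
    if dist >= 0.15  # keep only rows far enough from every benchmark item
]
print(kept)  # the paraphrased veins question should be filtered out
```

Second, one straightforward way to implement the "UTF-8 to ASCII" replacement with a plain `str.translate` table; the character set follows the list in that section, with the right single quote added for symmetry:

```python
# Map the "standard" utf-8 punctuation called out above to plain ascii.
ASCII_MAP = str.maketrans({
    "\u201c": '"',    # left double quote
    "\u201d": '"',    # right double quote
    "\u2018": "'",    # left single quote / apostrophe
    "\u2019": "'",    # right single quote (added for symmetry)
    "\u2026": "...",  # ellipsis
})

def to_ascii(text: str) -> str:
    return text.translate(ASCII_MAP)

print(to_ascii("\u201cIt\u2019s fine\u2026\u201d"))  # prints: "It's fine..."
```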
jondurbin/airoboros-2.2
[ "license:other", "region:us" ]
2023-10-03T17:46:53+00:00
{"license": "other"}
2023-10-03T18:01:21+00:00
[]
[]
TAGS
#license-other #region-us
## Overview

This dataset is mostly a continuation of URL with some notable additions and fixes.

- Some of the content is "toxic"/"harmful", and contains profanity and other types of sensitive content.
- None of the content or views contained in text within this dataset necessarily align with my personal beliefs or opinions, they are simply text generated by LLMs and/or scraped from the web.
- Use with caution, particularly in locations with less-than-free speech laws.
- You, and you alone are responsible for having downloaded the dataset and having a copy of the contents therein and I am completely indemnified from any and all liabilities.

### 2.1 Contamination

I accidentally included some of the benchmark data in the first version of the airoboros-2.1 model, which is why it had a crazy high truthfulqa score. Discussions here:
- URL
- URL

I flagged it for removal and recreated the model right away, but the leaderboard cached the old results, so it took some time to reflect.

Some of the instructors I use create overlapping data, and it's hard to filter, especially since the instructions aren't typically verbatim copies of the benchmark questions.

This time around, I used 'thenlper/gte-small' to calculate embeddings of the instructions, along with a faiss index, and removed anything from the dataset that had a similarity score < 0.15 (from truthfulqa). If you have a better way of checking, please let me know!

I haven't done the same for most other benchmarks (yet) because there are hundreds of thousands of instructions and it would be pretty computationally expensive to do. That said, I only have ~1279 multiple choice questions, all randomly GPT generated, so there's probably little-to-no overlap.

### Awareness

I added a new "awareness" instructor, which aims to add a lot more nuance to responses relating to time, location, senses, etc. based on the system prompt.

For example, if you are using the standard prompt with user/assistant, and ask how long it would take to get to Chicago, the answer will be something about AI not having a physical presence.
If, on the other hand, you are using a system prompt with a human character specified, the model attempts to infer location from "home" and will provide a more nuanced answer as a human would (in theory).

URL

### Editor

I created a text edit instructor as well, which uses a reverse prompt mechanism, meaning it takes the existing writing samples that have been generated, rewrites them to have misspellings, poor grammar, etc., then uses a prompt like "Please correct and improve the text." with the original, well-written text as the target output.

URL

### Writing

I regenerated (almost) all of the training data that included "Once upon a time..." because it's too cliche and boring.

### Multiple choice

I created many more multiple choice questions, many of which have additional text context.

### Roleplay/conversation

I re-created all of the GTKM and RP datasets this time around, removing all of the "USER: " and "ASSISTANT: " prefixes from the instructions/responses, so it's more compatible with existing interfaces.

The GTKM instructor now does the same thing as RP, in that it saves each round of "conversation" as a separate row in the output - previously it only saved the final response, which may not have been sufficient since I don't typically train on inputs.

### UTF-8 to ASCII

I replaced most of the "standard" utf-8 sequences - left double quote, right double quote, left apostrophe, ellipses - with standard ascii characters. I don't know if this was contributing to part of the issue with eos tokens being produced after apostrophes, but I figured it was worth trying.

### Summarization

I also included 500 examples from:
URL

These are existing summarizations from various public datasets, formatted to airoboros-style contextual QA.

Thanks Matt!

### Usage/license info

Much (most) of the data was generated via gpt-4 API calls, which has a restriction in the ToS about "competing" models. Please seek legal advice if you plan to build or use a model that includes this dataset in a commercial setting.
[ "## Overview\n\nThis dataset is mostly a continuation of URL with some notable additions and fixes.\n\n- Some of the content is \"toxic\"/\"harmful\", and contains profanity and other types of sensitive content.\n- None of the content or views contained in text within this dataset necessarily align with my personal beliefs or opinions, they are simply text generated by LLMs and/or scraped from the web.\n- Use with caution, particularly in locations with less-than-free speech laws.\n- You, and you alone are responsible for having downloaded the dataset and having a copy of the contents therein and I am completely indemnified from any and all liabilities.", "### 2.1 Contamination\n\nI accidentally included some of the benchmark data in the first version of the airboros-2.1 model, which is why it had a crazy high truthfulqa score. Discussions here:\n- URL\n- URL\n\nI flagged it for removal and recreated the model right away, but the leaderboard cached the old results so it took some time to reflect.\n\nSome of the instructors I use create overlapping data, and it's hard to filter, especially since the instructions aren't typically verbatim with the benchmark questions.\n\nThis time around, I used 'thenlper/gte-small' to calculate embeddings of the instructions, along with a faiss index, and removed anything from the dataset that had a similarity score < 0.15 (from truthfulqa). If you have a better way of checking, please let me know!\n\nI haven't done the same for most other benchmarks (yet) because there are hundreds of thousands of instructions and it would be pretty computationally expensive to do. That said, I only have ~1279 multiple choice questions, all randomly GPT generated, so there's probably little-to-no overlap.", "### Awareness\n\nI added a new \"awareness\" instructor, which aims to add a lot more nuance to responses relating to time, location, senses, etc. 
based on the system prompt.\n\nFor example, if you are using the standard prompt with user/assistant, and ask how long it would take to get to Chicago, the answer will be something about AI not having a physical presence.\nIf, on the other hand, you are using a system prompt with a human character specified, the model attempts to infer location from \"home\" and will provide a more nuanced answer as a human would (in theory).\n\nURL", "### Editor\n\nI created a text edit instructor as well, which uses a reverse prompt mechanism, meaning it takes the existing writing samples that have been generated, rewrites them to have misspellings, poor grammar, etc., then uses a prompt like \"Please correct and improve the text.\" with the original well-written text and target output.\n\nURL", "### Writing\n\nI regenerated (almost) all of the training data that included \"Once upon a time...\" because it's too cliche and boring.", "### Multiple choice\n\nI created many more multiple choice questions, many of which have additional text context.", "### Roleplay/conversation\n\nI re-created all of the GTKM and RP datasets this time around, removing all of the \"USER: \" and \"ASSISTANT: \" prefixes from the instructions/responses, so it's more compatible with existing interfaces.\n\nThe GTKM instructor now does the same thing as RP, in that it saves each round of \"conversation\" as a separate row in the output - previously it only saved the final response, which may not have been sufficient since I don't typically train on inputs.", "### UTF-8 to ASCII\n\nI replaced most of the \"standard\" utf-8 sequences - left double quote, right double quote, left apostraphe, ellipses - with standard ascii characters. I don't know if this was contributing to part of the issue with eos tokens being produced after apostraphes, but I figured it was worth trying.", "### Summarization\n\nI also included 500 examples from:\nURL\n\nThese are existing summarizarions from various public datasets, formatted to airoboros style contextual qa.\n\nThanks Matt!", "### Usage/license info\n\nMuch (most) of the data was generated via gpt-4 API calls, which has a restriction in the ToS about \"competing\" models. Please seek legal advice if you plan to build or use a model that includes this dataset in a commercial setting." ]
[ "TAGS\n#license-other #region-us \n", "## Overview\n\nThis dataset is mostly a continuation of URL with some notable additions and fixes.\n\n- Some of the content is \"toxic\"/\"harmful\", and contains profanity and other types of sensitive content.\n- None of the content or views contained in text within this dataset necessarily align with my personal beliefs or opinions, they are simply text generated by LLMs and/or scraped from the web.\n- Use with caution, particularly in locations with less-than-free speech laws.\n- You, and you alone are responsible for having downloaded the dataset and having a copy of the contents therein and I am completely indemnified from any and all liabilities.", "### 2.1 Contamination\n\nI accidentally included some of the benchmark data in the first version of the airboros-2.1 model, which is why it had a crazy high truthfulqa score. Discussions here:\n- URL\n- URL\n\nI flagged it for removal and recreated the model right away, but the leaderboard cached the old results so it took some time to reflect.\n\nSome of the instructors I use create overlapping data, and it's hard to filter, especially since the instructions aren't typically verbatim with the benchmark questions.\n\nThis time around, I used 'thenlper/gte-small' to calculate embeddings of the instructions, along with a faiss index, and removed anything from the dataset that had a similarity score < 0.15 (from truthfulqa). If you have a better way of checking, please let me know!\n\nI haven't done the same for most other benchmarks (yet) because there are hundreds of thousands of instructions and it would be pretty computationally expensive to do. That said, I only have ~1279 multiple choice questions, all randomly GPT generated, so there's probably little-to-no overlap.", "### Awareness\n\nI added a new \"awareness\" instructor, which aims to add a lot more nuance to responses relating to time, location, senses, etc. 
based on the system prompt.\n\nFor example, if you are using the standard prompt with user/assistant, and ask how long it would take to get to Chicago, the answer will be something about AI not having a physical presence.\nIf, on the other hand, you are using a system prompt with a human character specified, the model attempts to infer location from \"home\" and will provide a more nuanced answer as a human would (in theory).\n\nURL", "### Editor\n\nI created a text edit instructor as well, which uses a reverse prompt mechanism, meaning it takes the existing writing samples that have been generated, rewrites them to have misspellings, poor grammar, etc., then uses a prompt like \"Please correct and improve the text.\" with the original well-written text and target output.\n\nURL", "### Writing\n\nI regenerated (almost) all of the training data that included \"Once upon a time...\" because it's too cliche and boring.", "### Multiple choice\n\nI created many more multiple choice questions, many of which have additional text context.", "### Roleplay/conversation\n\nI re-created all of the GTKM and RP datasets this time around, removing all of the \"USER: \" and \"ASSISTANT: \" prefixes from the instructions/responses, so it's more compatible with existing interfaces.\n\nThe GTKM instructor now does the same thing as RP, in that it saves each round of \"conversation\" as a separate row in the output - previously it only saved the final response, which may not have been sufficient since I don't typically train on inputs.", "### UTF-8 to ASCII\n\nI replaced most of the \"standard\" utf-8 sequences - left double quote, right double quote, left apostraphe, ellipses - with standard ascii characters. I don't know if this was contributing to part of the issue with eos tokens being produced after apostraphes, but I figured it was worth trying.", "### Summarization\n\nI also included 500 examples from:\nURL\n\nThese are existing summarizarions from various public datasets, formatted to airoboros style contextual qa.\n\nThanks Matt!", "### Usage/license info\n\nMuch (most) of the data was generated via gpt-4 API calls, which has a restriction in the ToS about \"competing\" models. Please seek legal advice if you plan to build or use a model that includes this dataset in a commercial setting." ]
[ 11, 155, 256, 136, 79, 36, 21, 126, 83, 42, 67 ]
[ "passage: TAGS\n#license-other #region-us \n## Overview\n\nThis dataset is mostly a continuation of URL with some notable additions and fixes.\n\n- Some of the content is \"toxic\"/\"harmful\", and contains profanity and other types of sensitive content.\n- None of the content or views contained in text within this dataset necessarily align with my personal beliefs or opinions, they are simply text generated by LLMs and/or scraped from the web.\n- Use with caution, particularly in locations with less-than-free speech laws.\n- You, and you alone are responsible for having downloaded the dataset and having a copy of the contents therein and I am completely indemnified from any and all liabilities.### 2.1 Contamination\n\nI accidentally included some of the benchmark data in the first version of the airboros-2.1 model, which is why it had a crazy high truthfulqa score. Discussions here:\n- URL\n- URL\n\nI flagged it for removal and recreated the model right away, but the leaderboard cached the old results so it took some time to reflect.\n\nSome of the instructors I use create overlapping data, and it's hard to filter, especially since the instructions aren't typically verbatim with the benchmark questions.\n\nThis time around, I used 'thenlper/gte-small' to calculate embeddings of the instructions, along with a faiss index, and removed anything from the dataset that had a similarity score < 0.15 (from truthfulqa). If you have a better way of checking, please let me know!\n\nI haven't done the same for most other benchmarks (yet) because there are hundreds of thousands of instructions and it would be pretty computationally expensive to do. That said, I only have ~1279 multiple choice questions, all randomly GPT generated, so there's probably little-to-no overlap." ]
810d773f54d632ea64963bb6a1ddbe3de843fe9b
# Dataset Card for Evaluation run of Yukang/Llama-2-13b-chat-longlora-32k-sft

## Dataset Description

- **Homepage:**
- **Repository:** https://huggingface.co/Yukang/Llama-2-13b-chat-longlora-32k-sft
- **Paper:**
- **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
- **Point of Contact:** [email protected]

### Dataset Summary

Dataset automatically created during the evaluation run of model [Yukang/Llama-2-13b-chat-longlora-32k-sft](https://huggingface.co/Yukang/Llama-2-13b-chat-longlora-32k-sft) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).

The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 4 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).

To load the details from a run, you can for instance do the following:

```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_Yukang__Llama-2-13b-chat-longlora-32k-sft",
    "harness_winogrande_5",
    split="train")
```

## Latest results

These are the [latest results from run 2023-10-29T02:16:35.328850](https://huggingface.co/datasets/open-llm-leaderboard/details_Yukang__Llama-2-13b-chat-longlora-32k-sft/blob/main/results_2023-10-29T02-16-35.328850.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval):

```python
{
    "all": {
        "em": 0.17051174496644295,
        "em_stderr": 0.003851429222727117,
        "f1": 0.23656669463087293,
        "f1_stderr": 0.003934121554985558,
        "acc": 0.32044198895027626,
        "acc_stderr": 0.006741557601060113
    },
    "harness|drop|3": {
        "em": 0.17051174496644295,
        "em_stderr": 0.003851429222727117,
        "f1": 0.23656669463087293,
        "f1_stderr": 0.003934121554985558
    },
    "harness|gsm8k|5": {
        "acc": 0.0,
        "acc_stderr": 0.0
    },
    "harness|winogrande|5": {
        "acc": 0.6408839779005525,
        "acc_stderr": 0.013483115202120225
    }
}
```

### Supported Tasks and Leaderboards

[More Information Needed]

### Languages

[More Information Needed]

## Dataset Structure

### Data Instances

[More Information Needed]

### Data Fields

[More Information Needed]

### Data Splits

[More Information Needed]

## Dataset Creation

### Curation Rationale

[More Information Needed]

### Source Data

#### Initial Data Collection and Normalization

[More Information Needed]

#### Who are the source language producers?

[More Information Needed]

### Annotations

#### Annotation process

[More Information Needed]

#### Who are the annotators?

[More Information Needed]

### Personal and Sensitive Information

[More Information Needed]

## Considerations for Using the Data

### Social Impact of Dataset

[More Information Needed]

### Discussion of Biases

[More Information Needed]

### Other Known Limitations

[More Information Needed]

## Additional Information

### Dataset Curators

[More Information Needed]

### Licensing Information

[More Information Needed]

### Citation Information

[More Information Needed]

### Contributions

[More Information Needed]
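As a small companion sketch to the card above (illustrative, not part of the original card): enumerating this repo's 64 configurations and pulling one of the "latest" splits defined in the metadata, assuming the `datasets` library and that those split names are exposed as-is:

```python
# Sketch: list every evaluation config in this repo, then load the most
# recent ("latest") split of one task. Split names come from the metadata.
from datasets import get_dataset_config_names, load_dataset

repo = "open-llm-leaderboard/details_Yukang__Llama-2-13b-chat-longlora-32k-sft"

configs = get_dataset_config_names(repo)
print(len(configs))   # 64 configurations per the card
print(configs[:3])    # e.g. harness_arc_challenge_25, harness_drop_3, ...

gsm8k_latest = load_dataset(repo, "harness_gsm8k_5", split="latest")
print(gsm8k_latest)
```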
open-llm-leaderboard/details_Yukang__Llama-2-13b-chat-longlora-32k-sft
[ "region:us" ]
2023-10-03T18:02:16+00:00
{"pretty_name": "Evaluation run of Yukang/Llama-2-13b-chat-longlora-32k-sft", "dataset_summary": "Dataset automatically created during the evaluation run of model [Yukang/Llama-2-13b-chat-longlora-32k-sft](https://huggingface.co/Yukang/Llama-2-13b-chat-longlora-32k-sft) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 4 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_Yukang__Llama-2-13b-chat-longlora-32k-sft\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-10-29T02:16:35.328850](https://huggingface.co/datasets/open-llm-leaderboard/details_Yukang__Llama-2-13b-chat-longlora-32k-sft/blob/main/results_2023-10-29T02-16-35.328850.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.17051174496644295,\n \"em_stderr\": 0.003851429222727117,\n \"f1\": 0.23656669463087293,\n \"f1_stderr\": 0.003934121554985558,\n \"acc\": 0.32044198895027626,\n \"acc_stderr\": 0.006741557601060113\n },\n \"harness|drop|3\": {\n \"em\": 0.17051174496644295,\n \"em_stderr\": 0.003851429222727117,\n \"f1\": 0.23656669463087293,\n \"f1_stderr\": 0.003934121554985558\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.0,\n \"acc_stderr\": 0.0\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.6408839779005525,\n \"acc_stderr\": 0.013483115202120225\n }\n}\n```", "repo_url": "https://huggingface.co/Yukang/Llama-2-13b-chat-longlora-32k-sft", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_10_03T19_01_52.732036", "path": ["**/details_harness|arc:challenge|25_2023-10-03T19-01-52.732036.parquet"]}, {"split": "2023_10_03T19_09_03.932151", "path": ["**/details_harness|arc:challenge|25_2023-10-03T19-09-03.932151.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-10-03T19-09-03.932151.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_10_27T06_30_00.713733", "path": ["**/details_harness|drop|3_2023-10-27T06-30-00.713733.parquet"]}, {"split": "2023_10_29T02_16_35.328850", "path": ["**/details_harness|drop|3_2023-10-29T02-16-35.328850.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-10-29T02-16-35.328850.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_10_27T06_30_00.713733", "path": ["**/details_harness|gsm8k|5_2023-10-27T06-30-00.713733.parquet"]}, {"split": "2023_10_29T02_16_35.328850", "path": ["**/details_harness|gsm8k|5_2023-10-29T02-16-35.328850.parquet"]}, {"split": "latest", "path": 
["**/details_harness|gsm8k|5_2023-10-29T02-16-35.328850.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_10_03T19_01_52.732036", "path": ["**/details_harness|hellaswag|10_2023-10-03T19-01-52.732036.parquet"]}, {"split": "2023_10_03T19_09_03.932151", "path": ["**/details_harness|hellaswag|10_2023-10-03T19-09-03.932151.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-10-03T19-09-03.932151.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_10_03T19_01_52.732036", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-03T19-01-52.732036.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-10-03T19-01-52.732036.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-10-03T19-01-52.732036.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-10-03T19-01-52.732036.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-03T19-01-52.732036.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-10-03T19-01-52.732036.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-10-03T19-01-52.732036.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-10-03T19-01-52.732036.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-10-03T19-01-52.732036.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-10-03T19-01-52.732036.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-10-03T19-01-52.732036.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-10-03T19-01-52.732036.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-03T19-01-52.732036.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-10-03T19-01-52.732036.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-03T19-01-52.732036.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-03T19-01-52.732036.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-10-03T19-01-52.732036.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-10-03T19-01-52.732036.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-10-03T19-01-52.732036.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-03T19-01-52.732036.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-03T19-01-52.732036.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-03T19-01-52.732036.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-10-03T19-01-52.732036.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-03T19-01-52.732036.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-03T19-01-52.732036.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-03T19-01-52.732036.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-03T19-01-52.732036.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-10-03T19-01-52.732036.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-03T19-01-52.732036.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-03T19-01-52.732036.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-03T19-01-52.732036.parquet", 
"**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-03T19-01-52.732036.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-10-03T19-01-52.732036.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-10-03T19-01-52.732036.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-10-03T19-01-52.732036.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-10-03T19-01-52.732036.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-03T19-01-52.732036.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-10-03T19-01-52.732036.parquet", "**/details_harness|hendrycksTest-management|5_2023-10-03T19-01-52.732036.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-10-03T19-01-52.732036.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-10-03T19-01-52.732036.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-10-03T19-01-52.732036.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-10-03T19-01-52.732036.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-03T19-01-52.732036.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-10-03T19-01-52.732036.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-10-03T19-01-52.732036.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-10-03T19-01-52.732036.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-10-03T19-01-52.732036.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-10-03T19-01-52.732036.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-10-03T19-01-52.732036.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-10-03T19-01-52.732036.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-10-03T19-01-52.732036.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-10-03T19-01-52.732036.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-10-03T19-01-52.732036.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-03T19-01-52.732036.parquet", "**/details_harness|hendrycksTest-virology|5_2023-10-03T19-01-52.732036.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-10-03T19-01-52.732036.parquet"]}, {"split": "2023_10_03T19_09_03.932151", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-03T19-09-03.932151.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-10-03T19-09-03.932151.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-10-03T19-09-03.932151.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-10-03T19-09-03.932151.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-03T19-09-03.932151.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-10-03T19-09-03.932151.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-10-03T19-09-03.932151.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-10-03T19-09-03.932151.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-10-03T19-09-03.932151.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-10-03T19-09-03.932151.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-10-03T19-09-03.932151.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-10-03T19-09-03.932151.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-03T19-09-03.932151.parquet", 
"**/details_harness|hendrycksTest-econometrics|5_2023-10-03T19-09-03.932151.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-03T19-09-03.932151.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-03T19-09-03.932151.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-10-03T19-09-03.932151.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-10-03T19-09-03.932151.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-10-03T19-09-03.932151.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-03T19-09-03.932151.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-03T19-09-03.932151.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-03T19-09-03.932151.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-10-03T19-09-03.932151.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-03T19-09-03.932151.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-03T19-09-03.932151.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-03T19-09-03.932151.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-03T19-09-03.932151.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-10-03T19-09-03.932151.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-03T19-09-03.932151.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-03T19-09-03.932151.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-03T19-09-03.932151.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-03T19-09-03.932151.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-10-03T19-09-03.932151.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-10-03T19-09-03.932151.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-10-03T19-09-03.932151.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-10-03T19-09-03.932151.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-03T19-09-03.932151.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-10-03T19-09-03.932151.parquet", "**/details_harness|hendrycksTest-management|5_2023-10-03T19-09-03.932151.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-10-03T19-09-03.932151.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-10-03T19-09-03.932151.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-10-03T19-09-03.932151.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-10-03T19-09-03.932151.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-03T19-09-03.932151.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-10-03T19-09-03.932151.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-10-03T19-09-03.932151.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-10-03T19-09-03.932151.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-10-03T19-09-03.932151.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-10-03T19-09-03.932151.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-10-03T19-09-03.932151.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-10-03T19-09-03.932151.parquet", 
"**/details_harness|hendrycksTest-public_relations|5_2023-10-03T19-09-03.932151.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-10-03T19-09-03.932151.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-10-03T19-09-03.932151.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-03T19-09-03.932151.parquet", "**/details_harness|hendrycksTest-virology|5_2023-10-03T19-09-03.932151.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-10-03T19-09-03.932151.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-03T19-09-03.932151.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-10-03T19-09-03.932151.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-10-03T19-09-03.932151.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-10-03T19-09-03.932151.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-03T19-09-03.932151.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-10-03T19-09-03.932151.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-10-03T19-09-03.932151.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-10-03T19-09-03.932151.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-10-03T19-09-03.932151.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-10-03T19-09-03.932151.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-10-03T19-09-03.932151.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-10-03T19-09-03.932151.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-03T19-09-03.932151.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-10-03T19-09-03.932151.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-03T19-09-03.932151.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-03T19-09-03.932151.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-10-03T19-09-03.932151.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-10-03T19-09-03.932151.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-10-03T19-09-03.932151.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-03T19-09-03.932151.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-03T19-09-03.932151.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-03T19-09-03.932151.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-10-03T19-09-03.932151.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-03T19-09-03.932151.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-03T19-09-03.932151.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-03T19-09-03.932151.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-03T19-09-03.932151.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-10-03T19-09-03.932151.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-03T19-09-03.932151.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-03T19-09-03.932151.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-03T19-09-03.932151.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-03T19-09-03.932151.parquet", 
"**/details_harness|hendrycksTest-human_aging|5_2023-10-03T19-09-03.932151.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-10-03T19-09-03.932151.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-10-03T19-09-03.932151.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-10-03T19-09-03.932151.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-03T19-09-03.932151.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-10-03T19-09-03.932151.parquet", "**/details_harness|hendrycksTest-management|5_2023-10-03T19-09-03.932151.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-10-03T19-09-03.932151.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-10-03T19-09-03.932151.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-10-03T19-09-03.932151.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-10-03T19-09-03.932151.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-03T19-09-03.932151.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-10-03T19-09-03.932151.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-10-03T19-09-03.932151.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-10-03T19-09-03.932151.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-10-03T19-09-03.932151.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-10-03T19-09-03.932151.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-10-03T19-09-03.932151.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-10-03T19-09-03.932151.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-10-03T19-09-03.932151.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-10-03T19-09-03.932151.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-10-03T19-09-03.932151.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-03T19-09-03.932151.parquet", "**/details_harness|hendrycksTest-virology|5_2023-10-03T19-09-03.932151.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-10-03T19-09-03.932151.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_10_03T19_01_52.732036", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-03T19-01-52.732036.parquet"]}, {"split": "2023_10_03T19_09_03.932151", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-03T19-09-03.932151.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-03T19-09-03.932151.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_10_03T19_01_52.732036", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-10-03T19-01-52.732036.parquet"]}, {"split": "2023_10_03T19_09_03.932151", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-10-03T19-09-03.932151.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-10-03T19-09-03.932151.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_10_03T19_01_52.732036", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-10-03T19-01-52.732036.parquet"]}, {"split": "2023_10_03T19_09_03.932151", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-10-03T19-09-03.932151.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-astronomy|5_2023-10-03T19-09-03.932151.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_10_03T19_01_52.732036", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-10-03T19-01-52.732036.parquet"]}, {"split": "2023_10_03T19_09_03.932151", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-10-03T19-09-03.932151.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-10-03T19-09-03.932151.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_10_03T19_01_52.732036", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-03T19-01-52.732036.parquet"]}, {"split": "2023_10_03T19_09_03.932151", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-03T19-09-03.932151.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-03T19-09-03.932151.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_10_03T19_01_52.732036", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-10-03T19-01-52.732036.parquet"]}, {"split": "2023_10_03T19_09_03.932151", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-10-03T19-09-03.932151.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-10-03T19-09-03.932151.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_10_03T19_01_52.732036", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-10-03T19-01-52.732036.parquet"]}, {"split": "2023_10_03T19_09_03.932151", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-10-03T19-09-03.932151.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-10-03T19-09-03.932151.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_10_03T19_01_52.732036", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-10-03T19-01-52.732036.parquet"]}, {"split": "2023_10_03T19_09_03.932151", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-10-03T19-09-03.932151.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-10-03T19-09-03.932151.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_10_03T19_01_52.732036", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-10-03T19-01-52.732036.parquet"]}, {"split": "2023_10_03T19_09_03.932151", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-10-03T19-09-03.932151.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-10-03T19-09-03.932151.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_10_03T19_01_52.732036", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-10-03T19-01-52.732036.parquet"]}, {"split": "2023_10_03T19_09_03.932151", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-10-03T19-09-03.932151.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-10-03T19-09-03.932151.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", 
"data_files": [{"split": "2023_10_03T19_01_52.732036", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-10-03T19-01-52.732036.parquet"]}, {"split": "2023_10_03T19_09_03.932151", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-10-03T19-09-03.932151.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-10-03T19-09-03.932151.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_10_03T19_01_52.732036", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-10-03T19-01-52.732036.parquet"]}, {"split": "2023_10_03T19_09_03.932151", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-10-03T19-09-03.932151.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-10-03T19-09-03.932151.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_10_03T19_01_52.732036", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-03T19-01-52.732036.parquet"]}, {"split": "2023_10_03T19_09_03.932151", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-03T19-09-03.932151.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-03T19-09-03.932151.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_10_03T19_01_52.732036", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-10-03T19-01-52.732036.parquet"]}, {"split": "2023_10_03T19_09_03.932151", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-10-03T19-09-03.932151.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-10-03T19-09-03.932151.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_10_03T19_01_52.732036", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-03T19-01-52.732036.parquet"]}, {"split": "2023_10_03T19_09_03.932151", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-03T19-09-03.932151.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-03T19-09-03.932151.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_10_03T19_01_52.732036", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-03T19-01-52.732036.parquet"]}, {"split": "2023_10_03T19_09_03.932151", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-03T19-09-03.932151.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-03T19-09-03.932151.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_10_03T19_01_52.732036", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-10-03T19-01-52.732036.parquet"]}, {"split": "2023_10_03T19_09_03.932151", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-10-03T19-09-03.932151.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-10-03T19-09-03.932151.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_10_03T19_01_52.732036", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-10-03T19-01-52.732036.parquet"]}, {"split": 
"2023_10_03T19_09_03.932151", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-10-03T19-09-03.932151.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-10-03T19-09-03.932151.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_10_03T19_01_52.732036", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-10-03T19-01-52.732036.parquet"]}, {"split": "2023_10_03T19_09_03.932151", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-10-03T19-09-03.932151.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-10-03T19-09-03.932151.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_10_03T19_01_52.732036", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-03T19-01-52.732036.parquet"]}, {"split": "2023_10_03T19_09_03.932151", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-03T19-09-03.932151.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-03T19-09-03.932151.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_10_03T19_01_52.732036", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-03T19-01-52.732036.parquet"]}, {"split": "2023_10_03T19_09_03.932151", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-03T19-09-03.932151.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-03T19-09-03.932151.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_10_03T19_01_52.732036", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-03T19-01-52.732036.parquet"]}, {"split": "2023_10_03T19_09_03.932151", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-03T19-09-03.932151.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-03T19-09-03.932151.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_10_03T19_01_52.732036", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-10-03T19-01-52.732036.parquet"]}, {"split": "2023_10_03T19_09_03.932151", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-10-03T19-09-03.932151.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-10-03T19-09-03.932151.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_10_03T19_01_52.732036", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-03T19-01-52.732036.parquet"]}, {"split": "2023_10_03T19_09_03.932151", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-03T19-09-03.932151.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-03T19-09-03.932151.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_10_03T19_01_52.732036", "path": 
["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-03T19-01-52.732036.parquet"]}, {"split": "2023_10_03T19_09_03.932151", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-03T19-09-03.932151.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-03T19-09-03.932151.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_10_03T19_01_52.732036", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-03T19-01-52.732036.parquet"]}, {"split": "2023_10_03T19_09_03.932151", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-03T19-09-03.932151.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-03T19-09-03.932151.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_10_03T19_01_52.732036", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-03T19-01-52.732036.parquet"]}, {"split": "2023_10_03T19_09_03.932151", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-03T19-09-03.932151.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-03T19-09-03.932151.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_10_03T19_01_52.732036", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-10-03T19-01-52.732036.parquet"]}, {"split": "2023_10_03T19_09_03.932151", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-10-03T19-09-03.932151.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-10-03T19-09-03.932151.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_10_03T19_01_52.732036", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-03T19-01-52.732036.parquet"]}, {"split": "2023_10_03T19_09_03.932151", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-03T19-09-03.932151.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-03T19-09-03.932151.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_10_03T19_01_52.732036", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-03T19-01-52.732036.parquet"]}, {"split": "2023_10_03T19_09_03.932151", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-03T19-09-03.932151.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-03T19-09-03.932151.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_10_03T19_01_52.732036", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-03T19-01-52.732036.parquet"]}, {"split": "2023_10_03T19_09_03.932151", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-03T19-09-03.932151.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-03T19-09-03.932151.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_10_03T19_01_52.732036", "path": 
["**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-03T19-01-52.732036.parquet"]}, {"split": "2023_10_03T19_09_03.932151", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-03T19-09-03.932151.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-03T19-09-03.932151.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_10_03T19_01_52.732036", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-10-03T19-01-52.732036.parquet"]}, {"split": "2023_10_03T19_09_03.932151", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-10-03T19-09-03.932151.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-10-03T19-09-03.932151.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_10_03T19_01_52.732036", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-10-03T19-01-52.732036.parquet"]}, {"split": "2023_10_03T19_09_03.932151", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-10-03T19-09-03.932151.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-10-03T19-09-03.932151.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_10_03T19_01_52.732036", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-10-03T19-01-52.732036.parquet"]}, {"split": "2023_10_03T19_09_03.932151", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-10-03T19-09-03.932151.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-10-03T19-09-03.932151.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_10_03T19_01_52.732036", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-10-03T19-01-52.732036.parquet"]}, {"split": "2023_10_03T19_09_03.932151", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-10-03T19-09-03.932151.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-10-03T19-09-03.932151.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_10_03T19_01_52.732036", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-03T19-01-52.732036.parquet"]}, {"split": "2023_10_03T19_09_03.932151", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-03T19-09-03.932151.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-03T19-09-03.932151.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_10_03T19_01_52.732036", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-10-03T19-01-52.732036.parquet"]}, {"split": "2023_10_03T19_09_03.932151", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-10-03T19-09-03.932151.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-10-03T19-09-03.932151.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_10_03T19_01_52.732036", "path": ["**/details_harness|hendrycksTest-management|5_2023-10-03T19-01-52.732036.parquet"]}, {"split": "2023_10_03T19_09_03.932151", "path": 
["**/details_harness|hendrycksTest-management|5_2023-10-03T19-09-03.932151.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-10-03T19-09-03.932151.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_10_03T19_01_52.732036", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-10-03T19-01-52.732036.parquet"]}, {"split": "2023_10_03T19_09_03.932151", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-10-03T19-09-03.932151.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-10-03T19-09-03.932151.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_10_03T19_01_52.732036", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-10-03T19-01-52.732036.parquet"]}, {"split": "2023_10_03T19_09_03.932151", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-10-03T19-09-03.932151.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-10-03T19-09-03.932151.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_10_03T19_01_52.732036", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-10-03T19-01-52.732036.parquet"]}, {"split": "2023_10_03T19_09_03.932151", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-10-03T19-09-03.932151.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-10-03T19-09-03.932151.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_10_03T19_01_52.732036", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-10-03T19-01-52.732036.parquet"]}, {"split": "2023_10_03T19_09_03.932151", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-10-03T19-09-03.932151.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-10-03T19-09-03.932151.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_10_03T19_01_52.732036", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-03T19-01-52.732036.parquet"]}, {"split": "2023_10_03T19_09_03.932151", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-03T19-09-03.932151.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-03T19-09-03.932151.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_10_03T19_01_52.732036", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-10-03T19-01-52.732036.parquet"]}, {"split": "2023_10_03T19_09_03.932151", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-10-03T19-09-03.932151.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-10-03T19-09-03.932151.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_10_03T19_01_52.732036", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-10-03T19-01-52.732036.parquet"]}, {"split": "2023_10_03T19_09_03.932151", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-10-03T19-09-03.932151.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-10-03T19-09-03.932151.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": 
"2023_10_03T19_01_52.732036", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-10-03T19-01-52.732036.parquet"]}, {"split": "2023_10_03T19_09_03.932151", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-10-03T19-09-03.932151.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-10-03T19-09-03.932151.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_10_03T19_01_52.732036", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-10-03T19-01-52.732036.parquet"]}, {"split": "2023_10_03T19_09_03.932151", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-10-03T19-09-03.932151.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-10-03T19-09-03.932151.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_10_03T19_01_52.732036", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-10-03T19-01-52.732036.parquet"]}, {"split": "2023_10_03T19_09_03.932151", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-10-03T19-09-03.932151.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-10-03T19-09-03.932151.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_10_03T19_01_52.732036", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-10-03T19-01-52.732036.parquet"]}, {"split": "2023_10_03T19_09_03.932151", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-10-03T19-09-03.932151.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-10-03T19-09-03.932151.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_10_03T19_01_52.732036", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-10-03T19-01-52.732036.parquet"]}, {"split": "2023_10_03T19_09_03.932151", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-10-03T19-09-03.932151.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-10-03T19-09-03.932151.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_10_03T19_01_52.732036", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-10-03T19-01-52.732036.parquet"]}, {"split": "2023_10_03T19_09_03.932151", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-10-03T19-09-03.932151.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-10-03T19-09-03.932151.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_10_03T19_01_52.732036", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-10-03T19-01-52.732036.parquet"]}, {"split": "2023_10_03T19_09_03.932151", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-10-03T19-09-03.932151.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-10-03T19-09-03.932151.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_10_03T19_01_52.732036", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-10-03T19-01-52.732036.parquet"]}, {"split": 
"2023_10_03T19_09_03.932151", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-10-03T19-09-03.932151.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-10-03T19-09-03.932151.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_10_03T19_01_52.732036", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-03T19-01-52.732036.parquet"]}, {"split": "2023_10_03T19_09_03.932151", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-03T19-09-03.932151.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-03T19-09-03.932151.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_10_03T19_01_52.732036", "path": ["**/details_harness|hendrycksTest-virology|5_2023-10-03T19-01-52.732036.parquet"]}, {"split": "2023_10_03T19_09_03.932151", "path": ["**/details_harness|hendrycksTest-virology|5_2023-10-03T19-09-03.932151.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-10-03T19-09-03.932151.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_10_03T19_01_52.732036", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-10-03T19-01-52.732036.parquet"]}, {"split": "2023_10_03T19_09_03.932151", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-10-03T19-09-03.932151.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-10-03T19-09-03.932151.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_10_03T19_01_52.732036", "path": ["**/details_harness|truthfulqa:mc|0_2023-10-03T19-01-52.732036.parquet"]}, {"split": "2023_10_03T19_09_03.932151", "path": ["**/details_harness|truthfulqa:mc|0_2023-10-03T19-09-03.932151.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-10-03T19-09-03.932151.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_10_27T06_30_00.713733", "path": ["**/details_harness|winogrande|5_2023-10-27T06-30-00.713733.parquet"]}, {"split": "2023_10_29T02_16_35.328850", "path": ["**/details_harness|winogrande|5_2023-10-29T02-16-35.328850.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-10-29T02-16-35.328850.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_10_03T19_01_52.732036", "path": ["results_2023-10-03T19-01-52.732036.parquet"]}, {"split": "2023_10_03T19_09_03.932151", "path": ["results_2023-10-03T19-09-03.932151.parquet"]}, {"split": "2023_10_27T06_30_00.713733", "path": ["results_2023-10-27T06-30-00.713733.parquet"]}, {"split": "2023_10_29T02_16_35.328850", "path": ["results_2023-10-29T02-16-35.328850.parquet"]}, {"split": "latest", "path": ["results_2023-10-29T02-16-35.328850.parquet"]}]}]}
2023-10-29T02:16:47+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of Yukang/Llama-2-13b-chat-longlora-32k-sft

## Dataset Description

- Homepage:
- Repository: URL
- Paper:
- Leaderboard: URL
- Point of Contact: clementine@URL

### Dataset Summary

Dataset automatically created during the evaluation run of model Yukang/Llama-2-13b-chat-longlora-32k-sft on the Open LLM Leaderboard.

The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 4 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).

To load the details from a run, you can for instance do the following (see the sketch after this card):

## Latest results

These are the latest results from run 2023-10-29T02:16:35.328850 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval):

### Supported Tasks and Leaderboards

### Languages

## Dataset Structure

### Data Instances

### Data Fields

### Data Splits

## Dataset Creation

### Curation Rationale

### Source Data

#### Initial Data Collection and Normalization

#### Who are the source language producers?

### Annotations

#### Annotation process

#### Who are the annotators?

### Personal and Sensitive Information

## Considerations for Using the Data

### Social Impact of Dataset

### Discussion of Biases

### Other Known Limitations

## Additional Information

### Dataset Curators

### Licensing Information

### Contributions
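The loading snippet was stripped when this card was flattened. A minimal sketch of what it would look like, assuming the repo follows the leaderboard's `details_<org>__<model>` naming convention, with a config and split name that appear in this record's metadata:

```python
from datasets import load_dataset

# Repo id assumed from the leaderboard's details_<org>__<model> naming
# convention; "harness_winogrande_5" and "latest" appear in this record's
# config metadata.
data = load_dataset(
    "open-llm-leaderboard/details_Yukang__Llama-2-13b-chat-longlora-32k-sft",
    "harness_winogrande_5",
    split="latest",
)
print(data)
```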
[ "# Dataset Card for Evaluation run of Yukang/Llama-2-13b-chat-longlora-32k-sft", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model Yukang/Llama-2-13b-chat-longlora-32k-sft on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 4 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-29T02:16:35.328850(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of Yukang/Llama-2-13b-chat-longlora-32k-sft", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model Yukang/Llama-2-13b-chat-longlora-32k-sft on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 4 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-29T02:16:35.328850(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 28, 31, 176, 67, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of Yukang/Llama-2-13b-chat-longlora-32k-sft## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model Yukang/Llama-2-13b-chat-longlora-32k-sft on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 4 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-10-29T02:16:35.328850(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
8b960169b6967a0abe3d44ac8d1770aec6f61c85
# chico2prompts

There are 2 files, each following a different prompt. Both are CSV files in Spanish.

# Prompts

First prompt: Suggest a title for the following story.

In English:

```
Suggest a title for the following story: {{contents}}

completion: Sure, here's a suitable title for the given story {{titles}}.
```

In Spanish:

```
Sugiere un título para la siguiente historia: {{contents}}

Completado por lo siguiente: Un título posible para la siguiente historia podría ser: {{titles}}
```

Second prompt: Write a short story.

In English:

```
prompt: Write a short story based on the following title: {{titles}}

completion: {{contents}}
```

In Spanish:

```
prompt: Escribe una historia corta basada en el siguiente título {{titles}}

completion: {{contents}}
```

This dataset is a sub-version of the original [chico dataset](https://huggingface.co/datasets/snats/chico).
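A minimal sketch of filling these templates from one of the CSV files; the file name `titles_prompt.csv` and the column names `titles` and `contents` are assumptions, since the card does not document the schema:

```python
import pandas as pd

# Hypothetical file and column names; the card does not document the schema.
df = pd.read_csv("titles_prompt.csv")

def fill(template: str, row: pd.Series) -> str:
    # The card's templates use {{...}} markers, so plain substitution suffices.
    return (
        template.replace("{{contents}}", str(row["contents"]))
        .replace("{{titles}}", str(row["titles"]))
    )

template = "Sugiere un título para la siguiente historia: {{contents}}"
print(fill(template, df.iloc[0]))
```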
snats/chico2prompts
[ "license:cc-by-4.0", "region:us" ]
2023-10-03T18:02:43+00:00
{"license": "cc-by-4.0"}
2023-11-21T22:31:10+00:00
[]
[]
TAGS #license-cc-by-4.0 #region-us
# chico2prompts

There are 2 files, each following a different prompt. Both are CSV files in Spanish.

# Prompts

First prompt: Suggest a title for the following story.

In English:

In Spanish:

Second prompt: Write a short story.

In English:

In Spanish:

This dataset is a sub-version of the original chico dataset.
[ "# chico2prompts\n\nThere are 2 files, they follow two different prompts. They are in 2 different csv files in Spanish.", "# Prompts\n\nFirst prompt: Suggest a title for the following.\n\nIn english:\n\n\nIn spanish:\n\n\nSecond prompt: Write a short story\nIn english:\n\n\n\nIn spanish:\n\n\n\nThis dataset is a sub-version of the original chico dataset." ]
[ "TAGS\n#license-cc-by-4.0 #region-us \n", "# chico2prompts\n\nThere are 2 files, they follow two different prompts. They are in 2 different csv files in Spanish.", "# Prompts\n\nFirst prompt: Suggest a title for the following.\n\nIn english:\n\n\nIn spanish:\n\n\nSecond prompt: Write a short story\nIn english:\n\n\n\nIn spanish:\n\n\n\nThis dataset is a sub-version of the original chico dataset." ]
[ 15, 30, 54 ]
[ "passage: TAGS\n#license-cc-by-4.0 #region-us \n# chico2prompts\n\nThere are 2 files, they follow two different prompts. They are in 2 different csv files in Spanish.# Prompts\n\nFirst prompt: Suggest a title for the following.\n\nIn english:\n\n\nIn spanish:\n\n\nSecond prompt: Write a short story\nIn english:\n\n\n\nIn spanish:\n\n\n\nThis dataset is a sub-version of the original chico dataset." ]
c75da72ba7fb3b234f2aa4152e299a1e0e4c6c3c
# Dataset Card for "Melanoma_Train" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
MegPaulson/Melanoma_Train
[ "region:us" ]
2023-10-03T18:04:46+00:00
{"dataset_info": {"features": [{"name": "image", "dtype": "image"}, {"name": "image_seg", "dtype": "image"}, {"name": "prompt", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 35945944.0, "num_examples": 26}], "download_size": 1333203, "dataset_size": 35945944.0}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]}
2023-10-03T21:33:26+00:00
[]
[]
TAGS #region-us
# Dataset Card for "Melanoma_Train" More Information needed
[ "# Dataset Card for \"Melanoma_Train\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"Melanoma_Train\"\n\nMore Information needed" ]
[ 6, 16 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"Melanoma_Train\"\n\nMore Information needed" ]
463761b960238e29021c863f83a277f280057a82
Stack-v2 Python data. Deduped, filtered, and decontaminated. Includes permissively-licensed and no-license data; non-permissive data is excluded.
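A minimal sketch of loading this corpus; streaming avoids downloading everything up front, and the split name `train` is an assumption since the card lists no splits or columns:

```python
from datasets import load_dataset

# "train" is an assumed split name; the card does not document splits or columns.
ds = load_dataset("RaymondLi/the_stack_v2_python", split="train", streaming=True)
for example in ds.take(1):
    print(list(example.keys()))  # inspect the actual schema
```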
RaymondLi/the_stack_v2_python
[ "region:us" ]
2023-10-03T18:05:40+00:00
{}
2023-10-03T18:07:01+00:00
[]
[]
TAGS #region-us
Stack-v2 Python data. Deduped, filtered, and decontaminated. Includes permissively-licensed and no-license data; non-permissive data is excluded.
[]
[ "TAGS\n#region-us \n" ]
[ 6 ]
[ "passage: TAGS\n#region-us \n" ]
6047a54ced8d5fc182843ca9be68783215971ebe
# Dataset Card for "es_lawyer_instruct" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
Intuit-GenSRF/es_lawyer_instruct
[ "region:us" ]
2023-10-03T18:14:31+00:00
{"dataset_info": {"features": [{"name": "instruction", "dtype": "string"}, {"name": "output", "dtype": "string"}, {"name": "input", "dtype": "float64"}, {"name": "split", "dtype": "string"}, {"name": "text", "dtype": "string"}, {"name": "text_spanish", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 16852186, "num_examples": 9241}], "download_size": 7403208, "dataset_size": 16852186}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]}
2023-10-03T18:14:34+00:00
[]
[]
TAGS #region-us
# Dataset Card for "es_lawyer_instruct" More Information needed
[ "# Dataset Card for \"es_lawyer_instruct\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"es_lawyer_instruct\"\n\nMore Information needed" ]
[ 6, 17 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"es_lawyer_instruct\"\n\nMore Information needed" ]
e6d62cc6fd2138f2bc99af0454eeb7173c631aac
# Dataset Card for "es_mmlu_law" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
Intuit-GenSRF/es_mmlu_law
[ "region:us" ]
2023-10-03T18:15:34+00:00
{"dataset_info": {"features": [{"name": "question", "dtype": "string"}, {"name": "choices", "dtype": "string"}, {"name": "answer", "dtype": "int64"}, {"name": "negate_openai_prompt", "dtype": "string"}, {"name": "neg_question", "dtype": "string"}, {"name": "fewshot_context", "dtype": "string"}, {"name": "split", "dtype": "string"}, {"name": "text", "dtype": "string"}, {"name": "text_spanish", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 11082953, "num_examples": 1539}], "download_size": 3462359, "dataset_size": 11082953}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]}
2023-10-03T18:15:36+00:00
[]
[]
TAGS #region-us
# Dataset Card for "es_mmlu_law" More Information needed
[ "# Dataset Card for \"es_mmlu_law\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"es_mmlu_law\"\n\nMore Information needed" ]
[ 6, 16 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"es_mmlu_law\"\n\nMore Information needed" ]
c1dc9b6a0e17a2828eac19cc1986136bd90becbc
# Dataset Card for "es_legal_advice_reddit" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
Intuit-GenSRF/es_legal_advice_reddit
[ "region:us" ]
2023-10-03T18:16:54+00:00
{"dataset_info": {"features": [{"name": "created_utc", "dtype": "int64"}, {"name": "full_link", "dtype": "string"}, {"name": "id", "dtype": "string"}, {"name": "body", "dtype": "string"}, {"name": "title", "dtype": "string"}, {"name": "text_label", "dtype": "string"}, {"name": "flair_label", "dtype": "int64"}, {"name": "split", "dtype": "string"}, {"name": "text", "dtype": "string"}, {"name": "text_spanish", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 404835305, "num_examples": 98910}], "download_size": 244411822, "dataset_size": 404835305}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]}
2023-10-03T18:17:04+00:00
[]
[]
TAGS #region-us
# Dataset Card for "es_legal_advice_reddit" More Information needed
[ "# Dataset Card for \"es_legal_advice_reddit\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"es_legal_advice_reddit\"\n\nMore Information needed" ]
[ 6, 19 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"es_legal_advice_reddit\"\n\nMore Information needed" ]
d1ebcff3602532e12f7ef6dbc7f02127bac448f2
# Dataset Card for Evaluation run of dhmeltzer/Llama-2-7b-hf-eli5-cleaned-1024_qlora_merged

## Dataset Description

- **Homepage:**
- **Repository:** https://huggingface.co/dhmeltzer/Llama-2-7b-hf-eli5-cleaned-1024_qlora_merged
- **Paper:**
- **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
- **Point of Contact:** [email protected]

### Dataset Summary

Dataset automatically created during the evaluation run of model [dhmeltzer/Llama-2-7b-hf-eli5-cleaned-1024_qlora_merged](https://huggingface.co/dhmeltzer/Llama-2-7b-hf-eli5-cleaned-1024_qlora_merged) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).

The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)); see the sketch after this card for how to read it.

To load the details from a run, you can for instance do the following:

```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_dhmeltzer__Llama-2-7b-hf-eli5-cleaned-1024_qlora_merged",
    "harness_winogrande_5",
    split="train")
```

## Latest results

These are the [latest results from run 2023-10-25T08:47:01.240977](https://huggingface.co/datasets/open-llm-leaderboard/details_dhmeltzer__Llama-2-7b-hf-eli5-cleaned-1024_qlora_merged/blob/main/results_2023-10-25T08-47-01.240977.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval):

```python
{
    "all": {
        "em": 0.006082214765100671,
        "em_stderr": 0.0007962432393028668,
        "f1": 0.06534081375838918,
        "f1_stderr": 0.0015854305800075495,
        "acc": 0.3924845004077974,
        "acc_stderr": 0.00909442139378737
    },
    "harness|drop|3": {
        "em": 0.006082214765100671,
        "em_stderr": 0.0007962432393028668,
        "f1": 0.06534081375838918,
        "f1_stderr": 0.0015854305800075495
    },
    "harness|gsm8k|5": {
        "acc": 0.04700530705079606,
        "acc_stderr": 0.005829898355937179
    },
    "harness|winogrande|5": {
        "acc": 0.7379636937647988,
        "acc_stderr": 0.012358944431637557
    }
}
```

### Supported Tasks and Leaderboards

[More Information Needed]

### Languages

[More Information Needed]

## Dataset Structure

### Data Instances

[More Information Needed]

### Data Fields

[More Information Needed]

### Data Splits

[More Information Needed]

## Dataset Creation

### Curation Rationale

[More Information Needed]

### Source Data

#### Initial Data Collection and Normalization

[More Information Needed]

#### Who are the source language producers?

[More Information Needed]

### Annotations

#### Annotation process

[More Information Needed]

#### Who are the annotators?

[More Information Needed]

### Personal and Sensitive Information

[More Information Needed]

## Considerations for Using the Data

### Social Impact of Dataset

[More Information Needed]

### Discussion of Biases

[More Information Needed]

### Other Known Limitations

[More Information Needed]

## Additional Information

### Dataset Curators

[More Information Needed]

### Licensing Information

[More Information Needed]

### Citation Information

[More Information Needed]

### Contributions

[More Information Needed]
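The card mentions the aggregated "results" configuration without showing how to read it; a minimal sketch, reusing the `load_dataset` call from the card with the "latest" split naming used in this record's metadata:

```python
from datasets import load_dataset

# "results" aggregates all runs; "latest" follows the split naming used in
# this record's metadata.
results = load_dataset(
    "open-llm-leaderboard/details_dhmeltzer__Llama-2-7b-hf-eli5-cleaned-1024_qlora_merged",
    "results",
    split="latest",
)
print(results[0])
```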
open-llm-leaderboard/details_dhmeltzer__Llama-2-7b-hf-eli5-cleaned-1024_qlora_merged
[ "region:us" ]
2023-10-03T18:18:33+00:00
{"pretty_name": "Evaluation run of dhmeltzer/Llama-2-7b-hf-eli5-cleaned-1024_qlora_merged", "dataset_summary": "Dataset automatically created during the evaluation run of model [dhmeltzer/Llama-2-7b-hf-eli5-cleaned-1024_qlora_merged](https://huggingface.co/dhmeltzer/Llama-2-7b-hf-eli5-cleaned-1024_qlora_merged) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_dhmeltzer__Llama-2-7b-hf-eli5-cleaned-1024_qlora_merged\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-10-25T08:47:01.240977](https://huggingface.co/datasets/open-llm-leaderboard/details_dhmeltzer__Llama-2-7b-hf-eli5-cleaned-1024_qlora_merged/blob/main/results_2023-10-25T08-47-01.240977.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.006082214765100671,\n \"em_stderr\": 0.0007962432393028668,\n \"f1\": 0.06534081375838918,\n \"f1_stderr\": 0.0015854305800075495,\n \"acc\": 0.3924845004077974,\n \"acc_stderr\": 0.00909442139378737\n },\n \"harness|drop|3\": {\n \"em\": 0.006082214765100671,\n \"em_stderr\": 0.0007962432393028668,\n \"f1\": 0.06534081375838918,\n \"f1_stderr\": 0.0015854305800075495\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.04700530705079606,\n \"acc_stderr\": 0.005829898355937179\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7379636937647988,\n \"acc_stderr\": 0.012358944431637557\n }\n}\n```", "repo_url": "https://huggingface.co/dhmeltzer/Llama-2-7b-hf-eli5-cleaned-1024_qlora_merged", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_10_03T19_18_10.138787", "path": ["**/details_harness|arc:challenge|25_2023-10-03T19-18-10.138787.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-10-03T19-18-10.138787.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_10_25T08_47_01.240977", "path": ["**/details_harness|drop|3_2023-10-25T08-47-01.240977.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-10-25T08-47-01.240977.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_10_25T08_47_01.240977", "path": ["**/details_harness|gsm8k|5_2023-10-25T08-47-01.240977.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-10-25T08-47-01.240977.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_10_03T19_18_10.138787", "path": 
["**/details_harness|hellaswag|10_2023-10-03T19-18-10.138787.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-10-03T19-18-10.138787.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_10_03T19_18_10.138787", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-03T19-18-10.138787.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-10-03T19-18-10.138787.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-10-03T19-18-10.138787.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-10-03T19-18-10.138787.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-03T19-18-10.138787.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-10-03T19-18-10.138787.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-10-03T19-18-10.138787.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-10-03T19-18-10.138787.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-10-03T19-18-10.138787.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-10-03T19-18-10.138787.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-10-03T19-18-10.138787.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-10-03T19-18-10.138787.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-03T19-18-10.138787.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-10-03T19-18-10.138787.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-03T19-18-10.138787.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-03T19-18-10.138787.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-10-03T19-18-10.138787.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-10-03T19-18-10.138787.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-10-03T19-18-10.138787.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-03T19-18-10.138787.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-03T19-18-10.138787.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-03T19-18-10.138787.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-10-03T19-18-10.138787.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-03T19-18-10.138787.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-03T19-18-10.138787.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-03T19-18-10.138787.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-03T19-18-10.138787.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-10-03T19-18-10.138787.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-03T19-18-10.138787.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-03T19-18-10.138787.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-03T19-18-10.138787.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-03T19-18-10.138787.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-10-03T19-18-10.138787.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-10-03T19-18-10.138787.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-10-03T19-18-10.138787.parquet", 
"**/details_harness|hendrycksTest-jurisprudence|5_2023-10-03T19-18-10.138787.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-03T19-18-10.138787.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-10-03T19-18-10.138787.parquet", "**/details_harness|hendrycksTest-management|5_2023-10-03T19-18-10.138787.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-10-03T19-18-10.138787.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-10-03T19-18-10.138787.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-10-03T19-18-10.138787.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-10-03T19-18-10.138787.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-03T19-18-10.138787.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-10-03T19-18-10.138787.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-10-03T19-18-10.138787.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-10-03T19-18-10.138787.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-10-03T19-18-10.138787.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-10-03T19-18-10.138787.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-10-03T19-18-10.138787.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-10-03T19-18-10.138787.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-10-03T19-18-10.138787.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-10-03T19-18-10.138787.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-10-03T19-18-10.138787.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-03T19-18-10.138787.parquet", "**/details_harness|hendrycksTest-virology|5_2023-10-03T19-18-10.138787.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-10-03T19-18-10.138787.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-03T19-18-10.138787.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-10-03T19-18-10.138787.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-10-03T19-18-10.138787.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-10-03T19-18-10.138787.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-03T19-18-10.138787.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-10-03T19-18-10.138787.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-10-03T19-18-10.138787.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-10-03T19-18-10.138787.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-10-03T19-18-10.138787.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-10-03T19-18-10.138787.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-10-03T19-18-10.138787.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-10-03T19-18-10.138787.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-03T19-18-10.138787.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-10-03T19-18-10.138787.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-03T19-18-10.138787.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-03T19-18-10.138787.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-10-03T19-18-10.138787.parquet", 
"**/details_harness|hendrycksTest-global_facts|5_2023-10-03T19-18-10.138787.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-10-03T19-18-10.138787.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-03T19-18-10.138787.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-03T19-18-10.138787.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-03T19-18-10.138787.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-10-03T19-18-10.138787.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-03T19-18-10.138787.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-03T19-18-10.138787.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-03T19-18-10.138787.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-03T19-18-10.138787.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-10-03T19-18-10.138787.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-03T19-18-10.138787.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-03T19-18-10.138787.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-03T19-18-10.138787.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-03T19-18-10.138787.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-10-03T19-18-10.138787.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-10-03T19-18-10.138787.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-10-03T19-18-10.138787.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-10-03T19-18-10.138787.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-03T19-18-10.138787.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-10-03T19-18-10.138787.parquet", "**/details_harness|hendrycksTest-management|5_2023-10-03T19-18-10.138787.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-10-03T19-18-10.138787.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-10-03T19-18-10.138787.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-10-03T19-18-10.138787.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-10-03T19-18-10.138787.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-03T19-18-10.138787.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-10-03T19-18-10.138787.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-10-03T19-18-10.138787.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-10-03T19-18-10.138787.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-10-03T19-18-10.138787.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-10-03T19-18-10.138787.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-10-03T19-18-10.138787.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-10-03T19-18-10.138787.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-10-03T19-18-10.138787.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-10-03T19-18-10.138787.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-10-03T19-18-10.138787.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-03T19-18-10.138787.parquet", 
"**/details_harness|hendrycksTest-virology|5_2023-10-03T19-18-10.138787.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-10-03T19-18-10.138787.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_10_03T19_18_10.138787", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-03T19-18-10.138787.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-03T19-18-10.138787.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_10_03T19_18_10.138787", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-10-03T19-18-10.138787.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-10-03T19-18-10.138787.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_10_03T19_18_10.138787", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-10-03T19-18-10.138787.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-10-03T19-18-10.138787.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_10_03T19_18_10.138787", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-10-03T19-18-10.138787.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-10-03T19-18-10.138787.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_10_03T19_18_10.138787", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-03T19-18-10.138787.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-03T19-18-10.138787.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_10_03T19_18_10.138787", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-10-03T19-18-10.138787.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-10-03T19-18-10.138787.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_10_03T19_18_10.138787", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-10-03T19-18-10.138787.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-10-03T19-18-10.138787.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_10_03T19_18_10.138787", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-10-03T19-18-10.138787.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-10-03T19-18-10.138787.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_10_03T19_18_10.138787", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-10-03T19-18-10.138787.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-10-03T19-18-10.138787.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_10_03T19_18_10.138787", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-10-03T19-18-10.138787.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-10-03T19-18-10.138787.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_10_03T19_18_10.138787", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-10-03T19-18-10.138787.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-10-03T19-18-10.138787.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_10_03T19_18_10.138787", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-10-03T19-18-10.138787.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-10-03T19-18-10.138787.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_10_03T19_18_10.138787", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-03T19-18-10.138787.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-03T19-18-10.138787.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_10_03T19_18_10.138787", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-10-03T19-18-10.138787.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-10-03T19-18-10.138787.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_10_03T19_18_10.138787", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-03T19-18-10.138787.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-03T19-18-10.138787.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_10_03T19_18_10.138787", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-03T19-18-10.138787.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-03T19-18-10.138787.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_10_03T19_18_10.138787", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-10-03T19-18-10.138787.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-10-03T19-18-10.138787.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_10_03T19_18_10.138787", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-10-03T19-18-10.138787.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-10-03T19-18-10.138787.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_10_03T19_18_10.138787", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-10-03T19-18-10.138787.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-10-03T19-18-10.138787.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_10_03T19_18_10.138787", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-03T19-18-10.138787.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-03T19-18-10.138787.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_10_03T19_18_10.138787", "path": 
["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-03T19-18-10.138787.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-03T19-18-10.138787.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_10_03T19_18_10.138787", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-03T19-18-10.138787.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-03T19-18-10.138787.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_10_03T19_18_10.138787", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-10-03T19-18-10.138787.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-10-03T19-18-10.138787.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_10_03T19_18_10.138787", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-03T19-18-10.138787.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-03T19-18-10.138787.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_10_03T19_18_10.138787", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-03T19-18-10.138787.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-03T19-18-10.138787.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_10_03T19_18_10.138787", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-03T19-18-10.138787.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-03T19-18-10.138787.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_10_03T19_18_10.138787", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-03T19-18-10.138787.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-03T19-18-10.138787.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_10_03T19_18_10.138787", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-10-03T19-18-10.138787.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-10-03T19-18-10.138787.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_10_03T19_18_10.138787", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-03T19-18-10.138787.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-03T19-18-10.138787.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_10_03T19_18_10.138787", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-03T19-18-10.138787.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-03T19-18-10.138787.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_10_03T19_18_10.138787", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-03T19-18-10.138787.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-03T19-18-10.138787.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_10_03T19_18_10.138787", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-03T19-18-10.138787.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-03T19-18-10.138787.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_10_03T19_18_10.138787", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-10-03T19-18-10.138787.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-10-03T19-18-10.138787.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_10_03T19_18_10.138787", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-10-03T19-18-10.138787.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-10-03T19-18-10.138787.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_10_03T19_18_10.138787", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-10-03T19-18-10.138787.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-10-03T19-18-10.138787.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_10_03T19_18_10.138787", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-10-03T19-18-10.138787.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-10-03T19-18-10.138787.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_10_03T19_18_10.138787", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-03T19-18-10.138787.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-03T19-18-10.138787.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_10_03T19_18_10.138787", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-10-03T19-18-10.138787.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-10-03T19-18-10.138787.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_10_03T19_18_10.138787", "path": ["**/details_harness|hendrycksTest-management|5_2023-10-03T19-18-10.138787.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-10-03T19-18-10.138787.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_10_03T19_18_10.138787", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-10-03T19-18-10.138787.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-10-03T19-18-10.138787.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_10_03T19_18_10.138787", "path": 
["**/details_harness|hendrycksTest-medical_genetics|5_2023-10-03T19-18-10.138787.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-10-03T19-18-10.138787.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_10_03T19_18_10.138787", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-10-03T19-18-10.138787.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-10-03T19-18-10.138787.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_10_03T19_18_10.138787", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-10-03T19-18-10.138787.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-10-03T19-18-10.138787.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_10_03T19_18_10.138787", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-03T19-18-10.138787.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-03T19-18-10.138787.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_10_03T19_18_10.138787", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-10-03T19-18-10.138787.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-10-03T19-18-10.138787.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_10_03T19_18_10.138787", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-10-03T19-18-10.138787.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-10-03T19-18-10.138787.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_10_03T19_18_10.138787", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-10-03T19-18-10.138787.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-10-03T19-18-10.138787.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_10_03T19_18_10.138787", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-10-03T19-18-10.138787.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-10-03T19-18-10.138787.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_10_03T19_18_10.138787", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-10-03T19-18-10.138787.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-10-03T19-18-10.138787.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_10_03T19_18_10.138787", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-10-03T19-18-10.138787.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-10-03T19-18-10.138787.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_10_03T19_18_10.138787", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-10-03T19-18-10.138787.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_psychology|5_2023-10-03T19-18-10.138787.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_10_03T19_18_10.138787", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-10-03T19-18-10.138787.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-10-03T19-18-10.138787.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_10_03T19_18_10.138787", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-10-03T19-18-10.138787.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-10-03T19-18-10.138787.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_10_03T19_18_10.138787", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-10-03T19-18-10.138787.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-10-03T19-18-10.138787.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_10_03T19_18_10.138787", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-03T19-18-10.138787.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-03T19-18-10.138787.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_10_03T19_18_10.138787", "path": ["**/details_harness|hendrycksTest-virology|5_2023-10-03T19-18-10.138787.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-10-03T19-18-10.138787.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_10_03T19_18_10.138787", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-10-03T19-18-10.138787.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-10-03T19-18-10.138787.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_10_03T19_18_10.138787", "path": ["**/details_harness|truthfulqa:mc|0_2023-10-03T19-18-10.138787.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-10-03T19-18-10.138787.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_10_25T08_47_01.240977", "path": ["**/details_harness|winogrande|5_2023-10-25T08-47-01.240977.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-10-25T08-47-01.240977.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_10_03T19_18_10.138787", "path": ["results_2023-10-03T19-18-10.138787.parquet"]}, {"split": "2023_10_25T08_47_01.240977", "path": ["results_2023-10-25T08-47-01.240977.parquet"]}, {"split": "latest", "path": ["results_2023-10-25T08-47-01.240977.parquet"]}]}]}
2023-10-25T07:47:13+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of dhmeltzer/Llama-2-7b-hf-eli5-cleaned-1024_qlora_merged ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model dhmeltzer/Llama-2-7b-hf-eli5-cleaned-1024_qlora_merged on the Open LLM Leaderboard. The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-10-25T08:47:01.240977 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks; you can find each eval's results in its "latest" split): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
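The processed card text above references a loading snippet that was stripped out. A minimal sketch of what it looks like, assuming the repository follows the `open-llm-leaderboard/details_{org}__{model}` naming convention used by the other evaluation records in this dump:

```python
from datasets import load_dataset

# Assumed repository id, following the details_{org}__{model} convention
# visible in the other Open LLM Leaderboard evaluation records here.
data = load_dataset(
    "open-llm-leaderboard/details_dhmeltzer__Llama-2-7b-hf-eli5-cleaned-1024_qlora_merged",
    "harness_winogrande_5",  # one of the 64 task configurations
    split="latest",          # or a timestamped split such as "2023_10_25T08_47_01.240977"
)
```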
[ "# Dataset Card for Evaluation run of dhmeltzer/Llama-2-7b-hf-eli5-cleaned-1024_qlora_merged", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model dhmeltzer/Llama-2-7b-hf-eli5-cleaned-1024_qlora_merged on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-25T08:47:01.240977(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of dhmeltzer/Llama-2-7b-hf-eli5-cleaned-1024_qlora_merged", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model dhmeltzer/Llama-2-7b-hf-eli5-cleaned-1024_qlora_merged on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-25T08:47:01.240977(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 35, 31, 183, 66, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of dhmeltzer/Llama-2-7b-hf-eli5-cleaned-1024_qlora_merged## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model dhmeltzer/Llama-2-7b-hf-eli5-cleaned-1024_qlora_merged on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-10-25T08:47:01.240977(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
9dbb8f90948471f0dae833ecffd9f59ff04ffea3
# Dataset Card for "es_mental_health_counseling" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
Intuit-GenSRF/es_mental_health_counseling
[ "region:us" ]
2023-10-03T18:21:48+00:00
{"dataset_info": {"features": [{"name": "Context", "dtype": "string"}, {"name": "Response", "dtype": "string"}, {"name": "split", "dtype": "string"}, {"name": "text", "dtype": "string"}, {"name": "text_spanish", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 13763461, "num_examples": 3512}], "download_size": 7425319, "dataset_size": 13763461}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]}
2023-10-03T18:21:50+00:00
[]
[]
TAGS #region-us
# Dataset Card for "es_mental_health_counseling" More Information needed
[ "# Dataset Card for \"es_mental_health_counseling\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"es_mental_health_counseling\"\n\nMore Information needed" ]
[ 6, 19 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"es_mental_health_counseling\"\n\nMore Information needed" ]
b6e6c4462b8ce3d19ca70ca52d4bf4bb6e51c201
# Dataset Card for "es_counsel_chat" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
Intuit-GenSRF/es_counsel_chat
[ "region:us" ]
2023-10-03T18:22:22+00:00
{"dataset_info": {"features": [{"name": "questionID", "dtype": "int64"}, {"name": "questionTitle", "dtype": "string"}, {"name": "questionText", "dtype": "string"}, {"name": "questionLink", "dtype": "string"}, {"name": "topic", "dtype": "string"}, {"name": "therapistInfo", "dtype": "string"}, {"name": "therapistURL", "dtype": "string"}, {"name": "answerText", "dtype": "string"}, {"name": "upvotes", "dtype": "int64"}, {"name": "views", "dtype": "int64"}, {"name": "split", "dtype": "string"}, {"name": "text", "dtype": "string"}, {"name": "text_spanish", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 10490383, "num_examples": 2612}], "download_size": 5137621, "dataset_size": 10490383}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]}
2023-10-03T18:22:24+00:00
[]
[]
TAGS #region-us
# Dataset Card for "es_counsel_chat" More Information needed
[ "# Dataset Card for \"es_counsel_chat\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"es_counsel_chat\"\n\nMore Information needed" ]
[ 6, 17 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"es_counsel_chat\"\n\nMore Information needed" ]
5141399dcee20a39ed01c4a844846ac059bb23e9
# Dataset Card for Evaluation run of PulsarAI/EnsembleV5-Nova-13B

## Dataset Description

- **Homepage:** 
- **Repository:** https://huggingface.co/PulsarAI/EnsembleV5-Nova-13B
- **Paper:** 
- **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
- **Point of Contact:** [email protected]

### Dataset Summary

Dataset automatically created during the evaluation run of model [PulsarAI/EnsembleV5-Nova-13B](https://huggingface.co/PulsarAI/EnsembleV5-Nova-13B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).

The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).

To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_PulsarAI__EnsembleV5-Nova-13B",
	"harness_winogrande_5",
	split="train")
```

## Latest results

These are the [latest results from run 2023-10-23T15:24:00.966689](https://huggingface.co/datasets/open-llm-leaderboard/details_PulsarAI__EnsembleV5-Nova-13B/blob/main/results_2023-10-23T15-24-00.966689.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks; you can find each eval's results in its "latest" split):

```python
{
    "all": {
        "em": 0.007445469798657718,
        "em_stderr": 0.0008803652515899855,
        "f1": 0.08636220637583875,
        "f1_stderr": 0.0018310737230495444,
        "acc": 0.4350441276875584,
        "acc_stderr": 0.010249391454413254
    },
    "harness|drop|3": {
        "em": 0.007445469798657718,
        "em_stderr": 0.0008803652515899855,
        "f1": 0.08636220637583875,
        "f1_stderr": 0.0018310737230495444
    },
    "harness|gsm8k|5": {
        "acc": 0.10765731614859743,
        "acc_stderr": 0.008537484003023352
    },
    "harness|winogrande|5": {
        "acc": 0.7624309392265194,
        "acc_stderr": 0.011961298905803157
    }
}
```

### Supported Tasks and Leaderboards

[More Information Needed]

### Languages

[More Information Needed]

## Dataset Structure

### Data Instances

[More Information Needed]

### Data Fields

[More Information Needed]

### Data Splits

[More Information Needed]

## Dataset Creation

### Curation Rationale

[More Information Needed]

### Source Data

#### Initial Data Collection and Normalization

[More Information Needed]

#### Who are the source language producers?

[More Information Needed]

### Annotations

#### Annotation process

[More Information Needed]

#### Who are the annotators?

[More Information Needed]

### Personal and Sensitive Information

[More Information Needed]

## Considerations for Using the Data

### Social Impact of Dataset

[More Information Needed]

### Discussion of Biases

[More Information Needed]

### Other Known Limitations

[More Information Needed]

## Additional Information

### Dataset Curators

[More Information Needed]

### Licensing Information

[More Information Needed]

### Citation Information

[More Information Needed]

### Contributions

[More Information Needed]
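Beyond per-task details, the `results` configuration listed in this record's metadata aggregates every run; a sketch of loading it with the same interface, where `split="latest"` resolves to the 2023-10-23 run:

```python
from datasets import load_dataset

# "results" stores the aggregated metrics of all runs; per the configs above,
# its "latest" split points at results_2023-10-23T15-24-00.966689.parquet.
results = load_dataset(
    "open-llm-leaderboard/details_PulsarAI__EnsembleV5-Nova-13B",
    "results",
    split="latest",
)
print(results[0])
```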
open-llm-leaderboard/details_PulsarAI__EnsembleV5-Nova-13B
[ "region:us" ]
2023-10-03T18:23:22+00:00
{"pretty_name": "Evaluation run of PulsarAI/EnsembleV5-Nova-13B", "dataset_summary": "Dataset automatically created during the evaluation run of model [PulsarAI/EnsembleV5-Nova-13B](https://huggingface.co/PulsarAI/EnsembleV5-Nova-13B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_PulsarAI__EnsembleV5-Nova-13B\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-10-23T15:24:00.966689](https://huggingface.co/datasets/open-llm-leaderboard/details_PulsarAI__EnsembleV5-Nova-13B/blob/main/results_2023-10-23T15-24-00.966689.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.007445469798657718,\n \"em_stderr\": 0.0008803652515899855,\n \"f1\": 0.08636220637583875,\n \"f1_stderr\": 0.0018310737230495444,\n \"acc\": 0.4350441276875584,\n \"acc_stderr\": 0.010249391454413254\n },\n \"harness|drop|3\": {\n \"em\": 0.007445469798657718,\n \"em_stderr\": 0.0008803652515899855,\n \"f1\": 0.08636220637583875,\n \"f1_stderr\": 0.0018310737230495444\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.10765731614859743,\n \"acc_stderr\": 0.008537484003023352\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7624309392265194,\n \"acc_stderr\": 0.011961298905803157\n }\n}\n```", "repo_url": "https://huggingface.co/PulsarAI/EnsembleV5-Nova-13B", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_10_03T19_22_59.151966", "path": ["**/details_harness|arc:challenge|25_2023-10-03T19-22-59.151966.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-10-03T19-22-59.151966.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_10_23T15_24_00.966689", "path": ["**/details_harness|drop|3_2023-10-23T15-24-00.966689.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-10-23T15-24-00.966689.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_10_23T15_24_00.966689", "path": ["**/details_harness|gsm8k|5_2023-10-23T15-24-00.966689.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-10-23T15-24-00.966689.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_10_03T19_22_59.151966", "path": ["**/details_harness|hellaswag|10_2023-10-03T19-22-59.151966.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-10-03T19-22-59.151966.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", 
"data_files": [{"split": "2023_10_03T19_22_59.151966", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-03T19-22-59.151966.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-10-03T19-22-59.151966.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-10-03T19-22-59.151966.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-10-03T19-22-59.151966.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-03T19-22-59.151966.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-10-03T19-22-59.151966.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-10-03T19-22-59.151966.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-10-03T19-22-59.151966.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-10-03T19-22-59.151966.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-10-03T19-22-59.151966.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-10-03T19-22-59.151966.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-10-03T19-22-59.151966.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-03T19-22-59.151966.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-10-03T19-22-59.151966.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-03T19-22-59.151966.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-03T19-22-59.151966.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-10-03T19-22-59.151966.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-10-03T19-22-59.151966.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-10-03T19-22-59.151966.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-03T19-22-59.151966.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-03T19-22-59.151966.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-03T19-22-59.151966.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-10-03T19-22-59.151966.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-03T19-22-59.151966.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-03T19-22-59.151966.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-03T19-22-59.151966.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-03T19-22-59.151966.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-10-03T19-22-59.151966.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-03T19-22-59.151966.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-03T19-22-59.151966.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-03T19-22-59.151966.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-03T19-22-59.151966.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-10-03T19-22-59.151966.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-10-03T19-22-59.151966.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-10-03T19-22-59.151966.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-10-03T19-22-59.151966.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-03T19-22-59.151966.parquet", 
"**/details_harness|hendrycksTest-machine_learning|5_2023-10-03T19-22-59.151966.parquet", "**/details_harness|hendrycksTest-management|5_2023-10-03T19-22-59.151966.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-10-03T19-22-59.151966.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-10-03T19-22-59.151966.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-10-03T19-22-59.151966.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-10-03T19-22-59.151966.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-03T19-22-59.151966.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-10-03T19-22-59.151966.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-10-03T19-22-59.151966.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-10-03T19-22-59.151966.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-10-03T19-22-59.151966.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-10-03T19-22-59.151966.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-10-03T19-22-59.151966.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-10-03T19-22-59.151966.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-10-03T19-22-59.151966.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-10-03T19-22-59.151966.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-10-03T19-22-59.151966.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-03T19-22-59.151966.parquet", "**/details_harness|hendrycksTest-virology|5_2023-10-03T19-22-59.151966.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-10-03T19-22-59.151966.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-03T19-22-59.151966.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-10-03T19-22-59.151966.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-10-03T19-22-59.151966.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-10-03T19-22-59.151966.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-03T19-22-59.151966.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-10-03T19-22-59.151966.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-10-03T19-22-59.151966.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-10-03T19-22-59.151966.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-10-03T19-22-59.151966.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-10-03T19-22-59.151966.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-10-03T19-22-59.151966.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-10-03T19-22-59.151966.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-03T19-22-59.151966.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-10-03T19-22-59.151966.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-03T19-22-59.151966.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-03T19-22-59.151966.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-10-03T19-22-59.151966.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-10-03T19-22-59.151966.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-10-03T19-22-59.151966.parquet", 
"**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-03T19-22-59.151966.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-03T19-22-59.151966.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-03T19-22-59.151966.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-10-03T19-22-59.151966.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-03T19-22-59.151966.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-03T19-22-59.151966.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-03T19-22-59.151966.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-03T19-22-59.151966.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-10-03T19-22-59.151966.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-03T19-22-59.151966.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-03T19-22-59.151966.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-03T19-22-59.151966.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-03T19-22-59.151966.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-10-03T19-22-59.151966.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-10-03T19-22-59.151966.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-10-03T19-22-59.151966.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-10-03T19-22-59.151966.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-03T19-22-59.151966.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-10-03T19-22-59.151966.parquet", "**/details_harness|hendrycksTest-management|5_2023-10-03T19-22-59.151966.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-10-03T19-22-59.151966.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-10-03T19-22-59.151966.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-10-03T19-22-59.151966.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-10-03T19-22-59.151966.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-03T19-22-59.151966.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-10-03T19-22-59.151966.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-10-03T19-22-59.151966.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-10-03T19-22-59.151966.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-10-03T19-22-59.151966.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-10-03T19-22-59.151966.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-10-03T19-22-59.151966.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-10-03T19-22-59.151966.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-10-03T19-22-59.151966.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-10-03T19-22-59.151966.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-10-03T19-22-59.151966.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-03T19-22-59.151966.parquet", "**/details_harness|hendrycksTest-virology|5_2023-10-03T19-22-59.151966.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-10-03T19-22-59.151966.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", 
"data_files": [{"split": "2023_10_03T19_22_59.151966", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-03T19-22-59.151966.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-03T19-22-59.151966.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_10_03T19_22_59.151966", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-10-03T19-22-59.151966.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-10-03T19-22-59.151966.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_10_03T19_22_59.151966", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-10-03T19-22-59.151966.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-10-03T19-22-59.151966.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_10_03T19_22_59.151966", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-10-03T19-22-59.151966.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-10-03T19-22-59.151966.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_10_03T19_22_59.151966", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-03T19-22-59.151966.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-03T19-22-59.151966.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_10_03T19_22_59.151966", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-10-03T19-22-59.151966.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-10-03T19-22-59.151966.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_10_03T19_22_59.151966", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-10-03T19-22-59.151966.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-10-03T19-22-59.151966.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_10_03T19_22_59.151966", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-10-03T19-22-59.151966.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-10-03T19-22-59.151966.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_10_03T19_22_59.151966", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-10-03T19-22-59.151966.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-10-03T19-22-59.151966.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_10_03T19_22_59.151966", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-10-03T19-22-59.151966.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-10-03T19-22-59.151966.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_10_03T19_22_59.151966", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-10-03T19-22-59.151966.parquet"]}, {"split": "latest", 
"path": ["**/details_harness|hendrycksTest-college_physics|5_2023-10-03T19-22-59.151966.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_10_03T19_22_59.151966", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-10-03T19-22-59.151966.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-10-03T19-22-59.151966.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_10_03T19_22_59.151966", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-03T19-22-59.151966.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-03T19-22-59.151966.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_10_03T19_22_59.151966", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-10-03T19-22-59.151966.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-10-03T19-22-59.151966.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_10_03T19_22_59.151966", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-03T19-22-59.151966.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-03T19-22-59.151966.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_10_03T19_22_59.151966", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-03T19-22-59.151966.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-03T19-22-59.151966.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_10_03T19_22_59.151966", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-10-03T19-22-59.151966.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-10-03T19-22-59.151966.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_10_03T19_22_59.151966", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-10-03T19-22-59.151966.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-10-03T19-22-59.151966.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_10_03T19_22_59.151966", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-10-03T19-22-59.151966.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-10-03T19-22-59.151966.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_10_03T19_22_59.151966", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-03T19-22-59.151966.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-03T19-22-59.151966.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_10_03T19_22_59.151966", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-03T19-22-59.151966.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-03T19-22-59.151966.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_10_03T19_22_59.151966", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-03T19-22-59.151966.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-03T19-22-59.151966.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_10_03T19_22_59.151966", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-10-03T19-22-59.151966.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-10-03T19-22-59.151966.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_10_03T19_22_59.151966", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-03T19-22-59.151966.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-03T19-22-59.151966.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_10_03T19_22_59.151966", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-03T19-22-59.151966.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-03T19-22-59.151966.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_10_03T19_22_59.151966", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-03T19-22-59.151966.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-03T19-22-59.151966.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_10_03T19_22_59.151966", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-03T19-22-59.151966.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-03T19-22-59.151966.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_10_03T19_22_59.151966", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-10-03T19-22-59.151966.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-10-03T19-22-59.151966.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_10_03T19_22_59.151966", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-03T19-22-59.151966.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-03T19-22-59.151966.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_10_03T19_22_59.151966", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-03T19-22-59.151966.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-03T19-22-59.151966.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_10_03T19_22_59.151966", "path": 
["**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-03T19-22-59.151966.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-03T19-22-59.151966.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_10_03T19_22_59.151966", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-03T19-22-59.151966.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-03T19-22-59.151966.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_10_03T19_22_59.151966", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-10-03T19-22-59.151966.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-10-03T19-22-59.151966.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_10_03T19_22_59.151966", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-10-03T19-22-59.151966.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-10-03T19-22-59.151966.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_10_03T19_22_59.151966", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-10-03T19-22-59.151966.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-10-03T19-22-59.151966.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_10_03T19_22_59.151966", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-10-03T19-22-59.151966.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-10-03T19-22-59.151966.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_10_03T19_22_59.151966", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-03T19-22-59.151966.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-03T19-22-59.151966.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_10_03T19_22_59.151966", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-10-03T19-22-59.151966.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-10-03T19-22-59.151966.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_10_03T19_22_59.151966", "path": ["**/details_harness|hendrycksTest-management|5_2023-10-03T19-22-59.151966.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-10-03T19-22-59.151966.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_10_03T19_22_59.151966", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-10-03T19-22-59.151966.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-10-03T19-22-59.151966.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_10_03T19_22_59.151966", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-10-03T19-22-59.151966.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-medical_genetics|5_2023-10-03T19-22-59.151966.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_10_03T19_22_59.151966", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-10-03T19-22-59.151966.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-10-03T19-22-59.151966.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_10_03T19_22_59.151966", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-10-03T19-22-59.151966.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-10-03T19-22-59.151966.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_10_03T19_22_59.151966", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-03T19-22-59.151966.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-03T19-22-59.151966.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_10_03T19_22_59.151966", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-10-03T19-22-59.151966.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-10-03T19-22-59.151966.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_10_03T19_22_59.151966", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-10-03T19-22-59.151966.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-10-03T19-22-59.151966.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_10_03T19_22_59.151966", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-10-03T19-22-59.151966.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-10-03T19-22-59.151966.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_10_03T19_22_59.151966", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-10-03T19-22-59.151966.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-10-03T19-22-59.151966.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_10_03T19_22_59.151966", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-10-03T19-22-59.151966.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-10-03T19-22-59.151966.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_10_03T19_22_59.151966", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-10-03T19-22-59.151966.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-10-03T19-22-59.151966.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_10_03T19_22_59.151966", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-10-03T19-22-59.151966.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-10-03T19-22-59.151966.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": 
"2023_10_03T19_22_59.151966", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-10-03T19-22-59.151966.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-10-03T19-22-59.151966.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_10_03T19_22_59.151966", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-10-03T19-22-59.151966.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-10-03T19-22-59.151966.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_10_03T19_22_59.151966", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-10-03T19-22-59.151966.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-10-03T19-22-59.151966.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_10_03T19_22_59.151966", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-03T19-22-59.151966.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-03T19-22-59.151966.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_10_03T19_22_59.151966", "path": ["**/details_harness|hendrycksTest-virology|5_2023-10-03T19-22-59.151966.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-10-03T19-22-59.151966.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_10_03T19_22_59.151966", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-10-03T19-22-59.151966.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-10-03T19-22-59.151966.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_10_03T19_22_59.151966", "path": ["**/details_harness|truthfulqa:mc|0_2023-10-03T19-22-59.151966.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-10-03T19-22-59.151966.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_10_23T15_24_00.966689", "path": ["**/details_harness|winogrande|5_2023-10-23T15-24-00.966689.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-10-23T15-24-00.966689.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_10_03T19_22_59.151966", "path": ["results_2023-10-03T19-22-59.151966.parquet"]}, {"split": "2023_10_23T15_24_00.966689", "path": ["results_2023-10-23T15-24-00.966689.parquet"]}, {"split": "latest", "path": ["results_2023-10-23T15-24-00.966689.parquet"]}]}]}
2023-10-23T14:24:15+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of PulsarAI/EnsembleV5-Nova-13B ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model PulsarAI/EnsembleV5-Nova-13B on the Open LLM Leaderboard. The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-10-23T15:24:00.966689 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks; you can find each in the results and the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
[ "# Dataset Card for Evaluation run of PulsarAI/EnsembleV5-Nova-13B", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model PulsarAI/EnsembleV5-Nova-13B on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-23T15:24:00.966689(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of PulsarAI/EnsembleV5-Nova-13B", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model PulsarAI/EnsembleV5-Nova-13B on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-23T15:24:00.966689(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 22, 31, 170, 66, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of PulsarAI/EnsembleV5-Nova-13B## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model PulsarAI/EnsembleV5-Nova-13B on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-10-23T15:24:00.966689(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]